def to_high_level_kdata(kdata_df: pd.DataFrame, to_level: IntervalLevel):
    """Resample a lower-level kdata DataFrame up to `to_level` bars.

    Only LEVEL_1WEEK is implemented. The input must be at most daily data and
    strictly lower than `to_level`; entity metadata columns are re-attached to
    the resampled frame.
    """
    def to_close(s):
        # last value of the window (empty windows yield None and are dropped later)
        if pd_is_not_null(s):
            return s[-1]

    def to_open(s):
        # first value of the window
        if pd_is_not_null(s):
            return s[0]

    def to_high(s):
        return np.max(s)

    def to_low(s):
        return np.min(s)

    def to_sum(s):
        return np.sum(s)

    original_level = kdata_df['level'][0]
    entity_id = kdata_df['entity_id'][0]
    provider = kdata_df['provider'][0]
    name = kdata_df['name'][0]
    code = kdata_df['code'][0]

    entity_type, _, _ = decode_entity_id(entity_id=entity_id)

    assert IntervalLevel(original_level) <= IntervalLevel.LEVEL_1DAY
    assert IntervalLevel(original_level) < IntervalLevel(to_level)

    df: pd.DataFrame = None
    if to_level == IntervalLevel.LEVEL_1WEEK:
        # loffset='-2' 用周五作为时间标签 (label weekly bars with Friday)
        # Fix: the original 'stock' and non-stock branches were byte-identical,
        # so the entity_type switch was dead code and is collapsed here.
        # NOTE(review): `loffset` was deprecated in pandas 1.1 and removed in
        # 2.0 — confirm the pinned pandas version before upgrading.
        df = kdata_df.resample('W', loffset=pd.DateOffset(days=-2)).apply({
            'close': to_close,
            'open': to_open,
            'high': to_high,
            'low': to_low,
            'volume': to_sum,
            'turnover': to_sum,
        })
    df = df.dropna()
    # id entity_id timestamp provider code name level
    df['entity_id'] = entity_id
    df['provider'] = provider
    df['code'] = code
    df['name'] = name

    return df
def __init__(self,
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=zvt_config['batch_size'],
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """Bao stock kdata recorder: resolve schema/trading level, then init the base recorder.

    share_para is forwarded untouched to the base class.
    """
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type=EntityType.Stock, level=level, adjust_type=adjust_type)
    # map IntervalLevel to baostock's frequency string
    self.bao_trading_level = to_bao_trading_level(level)

    super().__init__(EntityType.Stock, exchanges, entity_ids, codes, batch_size, force_update,
                     sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, close_hour, close_minute, level, kdata_use_begin_time,
                     one_day_trading_minutes, share_para=share_para)
    self.adjust_type = adjust_type
def __init__(self,
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """Tonglian (通联) stock kdata recorder; only the daily ('1d') level is supported."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.tl_trading_level = to_jq_trading_level(level)
    if self.tl_trading_level != "1d":
        # Fix: the original called .format() on a string with no placeholder,
        # so the offending level value was never logged.
        self.logger.info('通联数据目前仅支持日K线,level入参仅支持:1d,实际level入参为{}'.format(self.tl_trading_level))
        raise Exception
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type
    # Connection to the Tonglian MySQL mirror; credentials come from zvt_env.
    self.tonglian_conn = create_engine(
        f"mysql://{zvt_env['tl_username']}:{zvt_env['tl_password']}@{zvt_env['tl_server_address']}:"
        f"{zvt_env['tl_server_port']}/{zvt_env['tl_db_name']}?charset=utf8mb4",
        pool_recycle=3600, echo=False).connect()
def __init__(self,
             exchanges=None,
             schema=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             ) -> None:
    """Stock factor recorder using joinquant; `schema` selects the factor schema."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    self.data_schema = get_stock_factor_schema(schema)
    self.jq_trading_level = to_jq_trading_level(level)
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    # authenticate against joinquant with credentials from zvt_env
    auth(zvt_env['jq_username'], zvt_env['jq_password'])
def __init__(self,
             # exchanges=['hk','sh','sz','o','a','n'],
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """Stock kdata recorder backed by the EMQuantAPI (`c.start` login)."""
    # Fix: avoid a shared mutable default list; effective default
    # (US exchanges 'o'/'a'/'n') unchanged.
    if exchanges is None:
        exchanges = ['o', 'a', 'n']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.jq_trading_level = to_jq_trading_level(level)
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    # 调用登录函数(激活后使用,不需要用户名密码) — activated login, no credentials needed
    loginResult = c.start("ForceLogin=1", '')
    if (loginResult.ErrorCode != 0):
        # NOTE(review): hard process exit inside a constructor — confirm this
        # is acceptable for library use.
        print("login in fail")
        exit()
def __init__(self,
             entity_type='stock',
             exchanges=None,
             entity_ids=None,
             codes=None,
             day_data=False,
             batch_size=10,
             force_update=True,
             sleeping_time=10,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             close_hour=0,
             close_minute=0,
             # child add
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60) -> None:
    """Base init for fixed-cycle recorders: stores level/timing knobs on top of the base recorder."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update,
                     sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, close_hour, close_minute)

    self.level = IntervalLevel(level)
    self.kdata_use_begin_time = kdata_use_begin_time
    self.one_day_trading_minutes = one_day_trading_minutes
def __init__(self, force_update=True, sleeping_time=10, exchanges=None, entity_ids=None, code=None,
             codes=None, day_data=False, entity_filters=None, ignore_failed=True, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None,
             level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60, adjust_type=AdjustType.qfq) -> None:
    """Resolve the kdata schema from the entity schema, level and adjust type,
    then delegate everything else to the base recorder."""
    normalized_level = IntervalLevel(level)
    self.adjust_type = AdjustType(adjust_type)
    # entity type is derived from the concrete entity schema's class name
    self.entity_type = self.entity_schema.__name__.lower()
    self.data_schema = get_kdata_schema(entity_type=self.entity_type,
                                        level=normalized_level,
                                        adjust_type=self.adjust_type)
    super().__init__(force_update, sleeping_time, exchanges, entity_ids, code, codes, day_data,
                     entity_filters, ignore_failed, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, normalized_level, kdata_use_begin_time, one_day_trading_minutes)
def __init__(self,
             exchanges=None,
             entity_ids=None,
             codes=None,
             day_data=True,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60) -> None:
    """Index kdata recorder using the joinquant trading-level mapping."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    self.data_schema = get_kdata_schema(entity_type='index', level=level)
    self.jq_trading_level = to_jq_trading_level(level)
    super().__init__('index', exchanges, entity_ids, codes, day_data, batch_size, force_update,
                     sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, close_hour, close_minute, level, kdata_use_begin_time,
                     one_day_trading_minutes)
    # refresh the joinquant token with credentials from zvt_config
    get_token(zvt_config['jq_username'], zvt_config['jq_password'], force=True)
def __init__(self,
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """Coin kdata recorder; defaults to the huobipro and binance exchanges."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['huobipro', 'binance']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='coin', level=level, adjust_type=adjust_type)
    super().__init__('coin', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type
def __init__(self, force_update=True, sleeping_time=10, exchanges=None, entity_ids=None, code=None,
             codes=None, day_data=False, entity_filters=None, ignore_failed=True, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None,
             level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60) -> None:
    """Delegate recorder setup to the base class, then keep the kdata-specific
    settings (level, begin-time convention, trading minutes) on the instance."""
    super().__init__(force_update, sleeping_time, exchanges, entity_ids,
                     code=code,
                     codes=codes,
                     day_data=day_data,
                     entity_filters=entity_filters,
                     ignore_failed=ignore_failed,
                     real_time=real_time,
                     fix_duplicate_way=fix_duplicate_way,
                     start_timestamp=start_timestamp,
                     end_timestamp=end_timestamp)

    self.level = IntervalLevel(level)
    self.kdata_use_begin_time = kdata_use_begin_time
    self.one_day_trading_minutes = one_day_trading_minutes
def __init__(self, entity_type='coin', exchanges=None, entity_ids=None, codes=None, day_data=False,
             batch_size=10, force_update=True, sleeping_time=1, default_size=2000, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None, close_hour=0,
             close_minute=0, level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60, entity_filters=None) -> None:
    """Coin kdata recorder (ccxt); by default restricted to */USDT pairs."""
    # default entity filter: only record USDT trading pairs
    entity_filters = entity_filters if entity_filters is not None else [Coin.code.contains('/USDT')]

    normalized_level = IntervalLevel(level)
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=normalized_level,
                                        adjust_type=None)
    # ccxt expects the level's plain string value (e.g. '1d')
    self.ccxt_trading_level = normalized_level.value

    super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update,
                     sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, close_hour, close_minute, normalized_level, kdata_use_begin_time,
                     one_day_trading_minutes, entity_filters)
def get_ma_stats_factor_schema(entity_type: str,
                               level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY):
    """Return the ``{Entity}{Level}MaStatsFactor`` schema class for entity_type/level."""
    # Fix: isinstance instead of exact type comparison (idiomatic, subclass-safe).
    if isinstance(level, str):
        level = IntervalLevel(level)
    schema_str = "{}{}MaStatsFactor".format(entity_type.capitalize(), level.value.capitalize())
    return get_schema_by_name(schema_str)
def __init__(self,
             entity_type='etf',
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """ETF kdata recorder backed by joinquant; authenticates on construction."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level, adjust_type=adjust_type)
    self.jq_trading_level = to_jq_trading_level(level)
    # NOTE(review): super() receives the literal 'stock' rather than
    # `entity_type` ('etf' by default) — looks suspicious; confirm intended.
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    auth(zvt_env['jq_username'], zvt_env['jq_password'])
    # 剩余查询额度 (remaining joinquant query quota, in 万/10k units)
    print(f"剩余{get_query_count()['spare']/10000}万")
def get_fxrate_kdata_schema(entity_type: str,
                            level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY):
    """Return the ``{entity_type}{Level}Kdata`` schema class for FX rates."""
    # Fix: isinstance instead of exact type comparison.
    if isinstance(level, str):
        level = IntervalLevel(level)
    # NOTE(review): unlike the sibling helpers this does not capitalize
    # entity_type — confirm callers pass an already-capitalized name.
    schema_str = '{}{}Kdata'.format(entity_type, level.value.capitalize())
    # eval on an internally-built class name; acceptable only because the
    # inputs are internal, but consider get_schema_by_name like sibling helpers.
    return eval(schema_str)
def get_zen_factor_schema(entity_type: str,
                          level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY):
    """Return the ``{Entity}{Level}ZenFactor`` schema class for entity_type/level."""
    # Fix: isinstance instead of exact type comparison.
    if isinstance(level, str):
        level = IntervalLevel(level)
    schema_str = '{}{}ZenFactor'.format(entity_type.capitalize(), level.value.capitalize())
    # eval on an internally-built class name; consider get_schema_by_name
    # for consistency with sibling helpers.
    return eval(schema_str)
def get_z_factor_schema(entity_type: str,
                        level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY):
    """Return the ``{Entity}{Level}ZFactor`` schema class for entity_type/level."""
    # Fix: isinstance instead of exact type comparison.
    if isinstance(level, str):
        level = IntervalLevel(level)
    # z factor schema rule
    # 1)name:{SecurityType.value.capitalize()}{IntervalLevel.value.upper()}ZFactor
    schema_str = "{}{}ZFactor".format(entity_type.capitalize(), level.value.capitalize())
    return get_schema_by_name(schema_str)
def get_ma_state_stats_schema(entity_type: str,
                              level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY):
    """Return the ``{Entity}{Level}MaStateStats`` schema class for entity_type/level."""
    # Fix: isinstance instead of exact type comparison.
    if isinstance(level, str):
        level = IntervalLevel(level)
    # ma state stats schema rule
    # 1)name:{SecurityType.value.capitalize()}{IntervalLevel.value.upper()}MaStateStats
    schema_str = '{}{}MaStateStats'.format(entity_type.capitalize(), level.value.capitalize())
    # eval on an internally-built class name; consider get_schema_by_name
    # for consistency with sibling helpers.
    return eval(schema_str)
def level_flag(level: IntervalLevel):
    """Map an IntervalLevel to the upstream API's numeric flag (day/week/month only)."""
    level = IntervalLevel(level)
    if level == IntervalLevel.LEVEL_1DAY:
        return 101
    if level == IntervalLevel.LEVEL_1WEEK:
        return 102
    if level == IntervalLevel.LEVEL_1MON:
        return 103
    # Fix: a bare `assert False` is stripped under `python -O`; raise explicitly.
    # AssertionError is kept so any existing callers still catch the same type.
    raise AssertionError(f'unsupported level:{level}')
def __init__(self, force_update=True, sleeping_time=10, exchanges=None, entity_id=None,
             entity_ids=None, code=None, codes=None, day_data=False, entity_filters=None,
             ignore_failed=True, real_time=False, fix_duplicate_way="ignore", start_timestamp=None,
             end_timestamp=None, level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60, adjust_type=AdjustType.qfq) -> None:
    """Resolve the stock kdata schema for level/adjust_type, delegate to the
    base recorder, then refresh the joinquant token."""
    normalized_level = IntervalLevel(level)
    normalized_adjust = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type="stock", level=normalized_level,
                                        adjust_type=normalized_adjust)
    self.jq_trading_level = to_jq_trading_level(normalized_level)
    super().__init__(force_update, sleeping_time, exchanges, entity_id, entity_ids, code, codes,
                     day_data, entity_filters, ignore_failed, real_time, fix_duplicate_way,
                     start_timestamp, end_timestamp, normalized_level, kdata_use_begin_time,
                     one_day_trading_minutes)
    self.adjust_type = normalized_adjust
    get_token(zvt_config["jq_username"], zvt_config["jq_password"], force=True)
def get_kdata_schema(entity_type: str,
                     level: Union[IntervalLevel, str] = IntervalLevel.LEVEL_1DAY,
                     adjust_type: Union[AdjustType, str] = None):
    """Return the kdata schema class for entity_type/level/adjust_type.

    Naming rule: ``{Entity}{Level}[{AdjustType}]Kdata``; qfq (the default
    adjustment) is omitted from the class name.
    """
    # Fix: isinstance instead of exact type comparisons.
    if isinstance(level, str):
        level = IntervalLevel(level)
    if isinstance(adjust_type, str):
        adjust_type = AdjustType(adjust_type)

    # kdata schema rule
    # 1)name:{entity_type.capitalize()}{IntervalLevel.value.upper()}Kdata
    if adjust_type and (adjust_type != AdjustType.qfq):
        schema_str = '{}{}{}Kdata'.format(entity_type.capitalize(), level.value.capitalize(),
                                          adjust_type.value.capitalize())
    else:
        schema_str = '{}{}Kdata'.format(entity_type.capitalize(), level.value.capitalize())
    return get_schema_by_name(schema_str)
def to_em_level_flag(level: IntervalLevel):
    """Map an IntervalLevel to eastmoney's numeric `klt` kline-type flag."""
    level = IntervalLevel(level)
    # Fix: the original mixed `if`/`elif` inconsistently and ended in
    # `assert False` (stripped under -O); a dispatch table is uniform and safe.
    flags = {
        IntervalLevel.LEVEL_5MIN: 5,
        IntervalLevel.LEVEL_15MIN: 15,
        IntervalLevel.LEVEL_30MIN: 30,
        IntervalLevel.LEVEL_1HOUR: 60,
        IntervalLevel.LEVEL_1DAY: 101,
        IntervalLevel.LEVEL_1WEEK: 102,
        IntervalLevel.LEVEL_1MON: 103,
    }
    try:
        return flags[level]
    except KeyError:
        # AssertionError preserved for any callers relying on the old behavior
        raise AssertionError(f'unsupported level:{level}') from None
def __init__(self,
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    """China stock kdata recorder backed by baostock; logs in anonymously on construction."""
    # Fix: avoid a shared mutable default list; effective default unchanged.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.bs_trading_level = to_bs_trading_level(level)
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    print("尝试登陆baostock")
    #####login#####
    lg = bs.login(user_id="anonymous", password="******")
    if (lg.error_code == '0'):
        print("登陆成功")
    else:
        print("登录失败")
def gen_kdata_schema(
    pkg: str,
    providers: List[str],
    entity_type: str,
    levels: List[IntervalLevel],
    adjust_types=None,
    entity_in_submodule: bool = False,
    kdata_module="quotes",
):
    """Generate one schema module per (level, adjust_type) under ./domain.

    Each generated file declares a ``{Entity}{Level}[{Adjust}]Kdata`` class and
    registers it; a package ``__init__.py`` is created when missing, and
    exports are regenerated at the end.  ``adjust_types`` defaults to [None]
    (no adjustment suffix).
    """
    if adjust_types is None:
        adjust_types = [None]
    tables = []

    base_path = "./domain"

    # schema files live under ./domain/{kdata_module}[/{entity_type}]
    if kdata_module:
        base_path = os.path.join(base_path, kdata_module)
    if entity_in_submodule:
        base_path = os.path.join(base_path, entity_type)

    if not os.path.exists(base_path):
        logger.info(f"create dir {base_path}")
        os.makedirs(base_path)

    for level in levels:
        for adjust_type in adjust_types:
            level = IntervalLevel(level)

            cap_entity_type = entity_type.capitalize()
            cap_level = level.value.capitalize()

            # you should define {EntityType}KdataCommon in kdata_module at first
            kdata_common = f"{cap_entity_type}KdataCommon"

            # qfq (the default adjustment) is omitted from class/table names
            if adjust_type and (adjust_type != AdjustType.qfq):
                class_name = f"{cap_entity_type}{cap_level}{adjust_type.value.capitalize()}Kdata"
                table_name = f"{entity_type}_{level.value}_{adjust_type.value.lower()}_kdata"
            else:
                class_name = f"{cap_entity_type}{cap_level}Kdata"
                table_name = f"{entity_type}_{level.value}_kdata"

            tables.append(table_name)

            # NOTE(review): template layout reconstructed from flattened source —
            # blank-line placement inside the generated file may differ slightly.
            schema_template = f"""# -*- coding: utf-8 -*-
# this file is generated by gen_kdata_schema function, dont't change it
from sqlalchemy.orm import declarative_base
from zvt.contract.register import register_schema
from {pkg}.domain.{kdata_module} import {kdata_common}

KdataBase = declarative_base()


class {class_name}(KdataBase, {kdata_common}):
    __tablename__ = '{table_name}'


register_schema(providers={providers}, db_name='{table_name}', schema_base=KdataBase, entity_type='{entity_type}')
"""
            # generate the schema
            with open(os.path.join(base_path, f"{table_name}.py"), "w") as outfile:
                outfile.write(schema_template)

        # generate the package
        pkg_file = os.path.join(base_path, "__init__.py")
        if not os.path.exists(pkg_file):
            package_template = """# -*- coding: utf-8 -*-
"""
            with open(pkg_file, "w") as outfile:
                outfile.write(package_template)

    # generate exports
    gen_exports("./domain")
def __init__(self,
             region: Region,
             data_schema: Type[Mixin],
             entity_schema: Type[EntityMixin],
             provider: Provider = Provider.Default,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             the_timestamp: Union[str, pd.Timestamp] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             columns: List = None,
             filters: List = None,
             order: object = None,
             limit: int = None,
             level: IntervalLevel = None,
             category_field: str = 'entity_id',
             time_field: str = 'timestamp',
             computing_window: int = None) -> None:
    """Normalize query arguments (time range, codes, entity ids, columns) and load the data.

    `the_timestamp` collapses the range to a single point; string `codes` may
    be a JSON list or comma-separated; string column names are resolved to
    schema columns.
    """
    super().__init__()
    self.logger = logging.getLogger(self.__class__.__name__)

    self.data_schema = data_schema
    self.entity_schema = entity_schema
    self.region = region
    self.provider = provider

    if end_timestamp is None:
        end_timestamp = now_pd_timestamp(self.region)

    self.the_timestamp = the_timestamp
    if the_timestamp:
        self.start_timestamp = the_timestamp
        self.end_timestamp = the_timestamp
    else:
        self.start_timestamp = start_timestamp
        self.end_timestamp = end_timestamp

    self.start_timestamp = to_pd_timestamp(self.start_timestamp)
    self.end_timestamp = to_pd_timestamp(self.end_timestamp)

    self.exchanges = exchanges

    if codes:
        # Fix: isinstance instead of exact type comparison.
        if isinstance(codes, str):
            codes = codes.replace(' ', '')
            if codes.startswith('[') and codes.endswith(']'):
                codes = json.loads(codes)
            else:
                codes = codes.split(',')
    self.codes = codes

    self.entity_ids = entity_ids

    # 转换成标准entity_id (resolve standard entity ids via the entity schema)
    if entity_schema and not self.entity_ids:
        df = get_entities(region=self.region, entity_schema=entity_schema, provider=self.provider,
                          exchanges=self.exchanges, codes=self.codes)
        if pd_is_not_null(df):
            self.entity_ids = df['entity_id'].to_list()

    self.filters = filters
    self.order = order
    self.limit = limit

    if level:
        self.level = IntervalLevel(level)
    else:
        self.level = level

    self.category_field = category_field
    self.time_field = time_field
    self.computing_window = computing_window

    # Fix: dynamic attribute access via getattr instead of eval — identical
    # result without executing generated code strings.
    self.category_col = getattr(self.data_schema, self.category_field)
    self.time_col = getattr(self.data_schema, self.time_field)

    self.columns = columns

    # we store the data in a multiple index(category_column,timestamp) Dataframe
    if self.columns:
        # support str
        if isinstance(columns[0], str):
            self.columns = [getattr(data_schema, col) for col in columns]

        # always add category_column and time_field for normalizing
        self.columns = list(set(self.columns) | {self.category_col, self.time_col})

    self.data_listeners: List[DataListener] = []

    self.data_df: pd.DataFrame = None

    self.load_data()
def to_high_level_kdata(kdata_df: pd.DataFrame, to_level: IntervalLevel):
    """Resample a lower-level kdata DataFrame up to `to_level` bars.

    Only LEVEL_1WEEK is implemented. The input must be at most daily data and
    strictly lower than `to_level`; entity metadata columns are re-attached.
    """
    def to_close(s):
        # last value of the window (empty windows yield None and are dropped later)
        if pd_is_not_null(s):
            return s[-1]

    def to_open(s):
        # first value of the window
        if pd_is_not_null(s):
            return s[0]

    def to_high(s):
        return np.max(s)

    def to_low(s):
        return np.min(s)

    def to_sum(s):
        return np.sum(s)

    original_level = kdata_df["level"][0]
    entity_id = kdata_df["entity_id"][0]
    provider = kdata_df["provider"][0]
    name = kdata_df["name"][0]
    code = kdata_df["code"][0]

    entity_type, _, _ = decode_entity_id(entity_id=entity_id)

    assert IntervalLevel(original_level) <= IntervalLevel.LEVEL_1DAY
    assert IntervalLevel(original_level) < IntervalLevel(to_level)

    df: pd.DataFrame = None
    if to_level == IntervalLevel.LEVEL_1WEEK:
        # loffset='-2' 用周五作为时间标签 (label weekly bars with Friday)
        # Fix: the original 'stock' and non-stock branches were byte-identical,
        # so the entity_type switch was dead code and is collapsed here.
        # NOTE(review): `loffset` was deprecated in pandas 1.1 and removed in
        # 2.0 — confirm the pinned pandas version before upgrading.
        df = kdata_df.resample("W", loffset=pd.DateOffset(days=-2)).apply({
            "close": to_close,
            "open": to_open,
            "high": to_high,
            "low": to_low,
            "volume": to_sum,
            "turnover": to_sum,
        })
    df = df.dropna()
    # id entity_id timestamp provider code name level
    df["entity_id"] = entity_id
    df["provider"] = provider
    df["code"] = code
    df["name"] = name

    return df
pass

# module exports
__all__ = ['BaoChinaStockKdataRecorder']

if __name__ == '__main__':
    # CLI entry: record baostock china stock kdata for a chosen level/codes,
    # then print the latest hfq daily bars for stock_sz_000001 as a sanity check.
    parser = argparse.ArgumentParser()
    parser.add_argument('--level', help='trading level', default='1d',
                        choices=[item.value for item in IntervalLevel])
    parser.add_argument('--codes', help='codes', default=['000001'], nargs='+')

    args = parser.parse_args()

    level = IntervalLevel(args.level)
    codes = args.codes

    init_log('bao_china_stock_{}_kdata.log'.format(args.level))
    # hfq (后复权) adjustment is used for recording here
    BaoChinaStockKdataRecorder(level=level, sleeping_time=0, codes=codes, real_time=False,
                               adjust_type=AdjustType.hfq).run()

    print(get_kdata(region=Region.CHN, entity_id='stock_sz_000001', limit=10,
                    order=Stock1dHfqKdata.timestamp.desc(), adjust_type=AdjustType.hfq))
def __init__(self,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             provider: str = None,
             level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
             trader_name: str = None,
             real_time: bool = False,
             kdata_use_begin_time: bool = False,
             draw_result: bool = True) -> None:
    # Trader bootstrap: wires up selectors, the simulated account service and
    # the trading levels for the [start_timestamp, end_timestamp] window.
    assert self.entity_schema is not None

    self.logger = logging.getLogger(__name__)

    # default trader name: lowercased concrete class name
    if trader_name:
        self.trader_name = trader_name
    else:
        self.trader_name = type(self).__name__.lower()

    self.trading_signal_listeners: List[TradingListener] = []

    self.selectors: List[TargetSelector] = []

    self.entity_ids = entity_ids

    self.exchanges = exchanges
    self.codes = codes

    self.provider = provider
    # make sure the min level selector correspond to the provider and level
    self.level = IntervalLevel(level)
    self.real_time = real_time

    # a concrete [start, end] window is mandatory — no open-ended runs
    if start_timestamp and end_timestamp:
        self.start_timestamp = to_pd_timestamp(start_timestamp)
        self.end_timestamp = to_pd_timestamp(end_timestamp)
    else:
        assert False

    self.trading_dates = self.entity_schema.get_trading_dates(start_date=self.start_timestamp,
                                                              end_date=self.end_timestamp)

    if real_time:
        logger.info(
            'real_time mode, end_timestamp should be future,you could set it big enough for running forever'
        )
        assert self.end_timestamp >= now_pd_timestamp()

    self.kdata_use_begin_time = kdata_use_begin_time
    self.draw_result = draw_result

    # simulated account tracks positions/cash and consumes trading signals
    self.account_service = SimAccountService(entity_schema=self.entity_schema,
                                             trader_name=self.trader_name,
                                             timestamp=self.start_timestamp,
                                             provider=self.provider,
                                             level=self.level)

    self.add_trading_signal_listener(self.account_service)

    self.init_selectors(entity_ids=entity_ids, entity_schema=self.entity_schema,
                        exchanges=self.exchanges, codes=self.codes,
                        start_timestamp=self.start_timestamp, end_timestamp=self.end_timestamp)

    if self.selectors:
        # distinct selector levels sorted ascending; the trader's own level
        # must be the minimum of them
        self.trading_level_asc = list(set([IntervalLevel(selector.level) for selector in
                                           self.selectors]))
        self.trading_level_asc.sort()

        self.logger.info(
            f'trader level:{self.level},selectors level:{self.trading_level_asc}'
        )

        if self.level != self.trading_level_asc[0]:
            raise Exception(
                "trader level should be the min of the selectors")

        self.trading_level_desc = list(self.trading_level_asc)
        self.trading_level_desc.reverse()

    self.targets_slot: TargetsSlot = TargetsSlot()

    self.session = get_db_session('zvt', data_schema=TraderInfo)

    self.on_start()
def gen_kdata_schema(pkg: str,
                     providers: List[str],
                     entity_type: str,
                     levels: List[IntervalLevel],
                     adjust_types: List[AdjustType] = None,
                     entity_in_submodule: bool = False,
                     kdata_module='quotes'):
    """Generate one kdata schema module per (level, adjust_type) under ./domain.

    Each generated file declares a ``{Entity}{Level}[{Adjust}]Kdata`` class and
    registers it; a package ``__init__.py`` is created when missing and exports
    are regenerated at the end.
    """
    # Fix: mutable default argument; the effective default [None] (no
    # adjustment suffix) is preserved.
    if adjust_types is None:
        adjust_types = [None]
    tables = []

    base_path = './domain'

    if kdata_module:
        base_path = os.path.join(base_path, kdata_module)
    if entity_in_submodule:
        base_path = os.path.join(base_path, entity_type)

    for level in levels:
        for adjust_type in adjust_types:
            level = IntervalLevel(level)

            cap_entity_type = entity_type.capitalize()
            cap_level = level.value.capitalize()

            # you should define {EntityType}KdataCommon in kdata_module at first
            kdata_common = f'{cap_entity_type}KdataCommon'

            # qfq (the default adjustment) is omitted from class/table names
            if adjust_type and (adjust_type != AdjustType.qfq):
                class_name = f'{cap_entity_type}{cap_level}{adjust_type.value.capitalize()}Kdata'
                table_name = f'{entity_type}_{level.value}_{adjust_type.value.lower()}_kdata'
            else:
                class_name = f'{cap_entity_type}{cap_level}Kdata'
                table_name = f'{entity_type}_{level.value}_kdata'

            tables.append(table_name)

            schema_template = f'''# -*- coding: utf-8 -*-
# this file is generated by gen_kdata_schema function, dont't change it
from sqlalchemy.orm import declarative_base
from zvt.contract.register import register_schema
from {pkg}.domain.{kdata_module} import {kdata_common}

KdataBase = declarative_base()


class {class_name}(KdataBase, {kdata_common}):
    __tablename__ = '{table_name}'


register_schema(providers={providers}, db_name='{table_name}', schema_base=KdataBase, entity_type='{entity_type}')
'''
            # generate the schema
            with open(os.path.join(base_path, f'{table_name}.py'), 'w') as outfile:
                outfile.write(schema_template)

        # generate the package
        pkg_file = os.path.join(base_path, '__init__.py')
        if not os.path.exists(pkg_file):
            package_template = '''# -*- coding: utf-8 -*-
'''
            with open(pkg_file, 'w') as outfile:
                outfile.write(package_template)

    # generate exports
    gen_exports('./domain')
def record_data(cls,
                provider_index: int = 0,
                provider: str = None,
                exchanges=None,
                entity_ids=None,
                codes=None,
                batch_size=None,
                force_update=None,
                sleeping_time=None,
                default_size=None,
                real_time=None,
                fix_duplicate_way=None,
                start_timestamp=None,
                end_timestamp=None,
                close_hour=None,
                close_minute=None,
                one_day_trading_minutes=None,
                **kwargs):
    """Run the registered recorder for this schema, forwarding only non-None args.

    Picks the recorder by `provider` (or `provider_index`), collects the
    explicitly-set arguments into `kw` so the recorder's own defaults apply
    otherwise, infers level/adjust_type from the table name for
    FixedCycleDataRecorder subclasses, then constructs and runs the recorder.
    """
    if cls.provider_map_recorder:
        print(
            f'{cls.__name__} registered recorders:{cls.provider_map_recorder}'
        )

        if provider:
            recorder_class = cls.provider_map_recorder[provider]
        else:
            recorder_class = cls.provider_map_recorder[
                cls.providers[provider_index]]

        # get args for specific recorder class
        from zvt.contract.recorder import TimeSeriesDataRecorder
        if issubclass(recorder_class, TimeSeriesDataRecorder):
            args = [
                item for item in inspect.getfullargspec(cls.record_data).args
                if item not in ('cls', 'provider_index', 'provider')
            ]
        else:
            args = ['batch_size', 'force_update', 'sleeping_time']

        # just fill the None arg to kw,so we could use the recorder_class default args
        kw = {}
        for arg in args:
            # NOTE: eval only ever sees this function's own parameter names,
            # so it is a (clunky) local-variable lookup, not untrusted input.
            tmp = eval(arg)
            if tmp is not None:
                kw[arg] = tmp

        # FixedCycleDataRecorder
        from zvt.contract.recorder import FixedCycleDataRecorder
        if issubclass(recorder_class, FixedCycleDataRecorder):
            # contract:
            # 1)use FixedCycleDataRecorder to record the data with IntervalLevel
            # 2)the table of schema with IntervalLevel format is {entity}_{level}_[adjust_type]_{event}
            table: str = cls.__tablename__
            try:
                items = table.split('_')
                if len(items) == 4:
                    adjust_type = items[2]
                    kw['adjust_type'] = adjust_type
                level = IntervalLevel(items[1])
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; best-effort fallback kept.
            except Exception:
                # for other schema not with normal format,but need to calculate size for remaining days
                level = IntervalLevel.LEVEL_1DAY

            kw['level'] = level

            # add other custom args
            for k in kwargs:
                kw[k] = kwargs[k]

            r = recorder_class(**kw)
            r.run()
            return
        else:
            r = recorder_class(**kw)
            r.run()
            return
    else:
        print(f'no recorders for {cls.__name__}')
def get_kdata(entity_id, level=IntervalLevel.LEVEL_1DAY, adjust_type=AdjustType.qfq, limit=10000):
    """Fetch kline bars from eastmoney and return them as a DataFrame.

    Returns None when the response carries no kline data. Raises
    requests.HTTPError on a non-2xx response.
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    level = IntervalLevel(level)
    sec_id = to_em_sec_id(entity_id)
    fq_flag = to_em_fq_flag(adjust_type)
    level_flag = to_em_level_flag(level)
    # f131 结算价 (settlement price)
    # f133 持仓 (open interest)
    # 目前未获取 (not fetched currently)
    url = f"https://push2his.eastmoney.com/api/qt/stock/kline/get?secid={sec_id}&klt={level_flag}&fqt={fq_flag}&lmt={limit}&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1"
    resp = requests.get(url, headers=DEFAULT_HEADER)
    resp.raise_for_status()

    results = resp.json()
    data = results["data"]

    kdatas = []

    if data:
        klines = data["klines"]
        name = data["name"]

        for result in klines:
            # "2000-01-28,1005.26,1012.56,1173.12,982.13,3023326,3075552000.00"
            # "2021-08-27,19.39,20.30,20.30,19.25,1688497,3370240912.00,5.48,6.01,1.15,3.98,0,0,0"
            # time,open,close,high,low,volume,turnover
            # "2022-04-13,10708,10664,10790,10638,402712,43124771328,1.43,0.57,60,0.00,4667112399583576064,4690067230254170112,1169270784"
            fields = result.split(",")
            the_timestamp = to_pd_timestamp(fields[0])

            the_id = generate_kdata_id(entity_id=entity_id, timestamp=the_timestamp, level=level)

            # Fix: renamed local `open` -> `open_price` to stop shadowing the builtin.
            open_price = to_float(fields[1])
            close = to_float(fields[2])
            high = to_float(fields[3])
            low = to_float(fields[4])
            volume = to_float(fields[5])
            turnover = to_float(fields[6])
            # 7 振幅 (amplitude, unused)
            change_pct = value_to_pct(to_float(fields[8]))
            # 9 变动 (change, unused)
            turnover_rate = value_to_pct(to_float(fields[10]))

            kdatas.append(
                dict(
                    id=the_id,
                    timestamp=the_timestamp,
                    entity_id=entity_id,
                    provider="em",
                    code=code,
                    name=name,
                    level=level.value,
                    open=open_price,
                    close=close,
                    high=high,
                    low=low,
                    volume=volume,
                    turnover=turnover,
                    turnover_rate=turnover_rate,
                    change_pct=change_pct,
                )
            )
    if kdatas:
        df = pd.DataFrame.from_records(kdatas)
        return df