def run(self):
    """Fetch and persist company profile and issuance info for every stock entity.

    For each ``StockDetail`` in ``self.entities`` it calls two eastmoney
    endpoints (company profile, issuance details), fills the corresponding
    columns, and commits after each stock.
    """
    for security_item in self.entities:
        assert isinstance(security_item, StockDetail)

        if security_item.exchange == 'sh':
            fc = "{}01".format(security_item.code)
        elif security_item.exchange == 'sz':
            fc = "{}02".format(security_item.code)
        else:
            # fix: the original used two independent `if`s with no fallback,
            # leaving `fc` unbound (NameError) for any other exchange
            self.logger.warning('unknown exchange:{} for:{}'.format(
                security_item.exchange, security_item.code))
            continue

        # 基本资料 (company profile)
        # TODO(review): "SecurityCode" is hard-coded to "SZ300059" for every
        # stock — looks like leftover debug data; confirm the api keys on
        # `fc` and ignores this field before removing it.
        param = {"color": "w", "fc": fc, "SecurityCode": "SZ300059"}
        resp = requests.post(
            'https://emh5.eastmoney.com/api/GongSiGaiKuang/GetJiBenZiLiao',
            json=param)
        resp.encoding = 'utf8'

        resp_json = resp.json()['Result']['JiBenZiLiao']

        security_item.profile = resp_json['CompRofile']
        security_item.main_business = resp_json['MainBusiness']
        security_item.date_of_establishment = to_pd_timestamp(
            resp_json['FoundDate'])

        # 关联行业 (related industries)
        industries = ','.join(resp_json['Industry'].split('-'))
        security_item.industries = industries

        # 关联概念 (related concept indices)
        security_item.concept_indices = resp_json['Block']
        # 关联地区 (related area indices)
        security_item.area_indices = resp_json['Provice']

        self.sleep()

        # 发行相关 (issuance details)
        param = {"color": "w", "fc": fc}
        resp = requests.post(
            'https://emh5.eastmoney.com/api/GongSiGaiKuang/GetFaXingXiangGuan',
            json=param)
        resp.encoding = 'utf8'

        resp_json = resp.json()['Result']['FaXingXiangGuan']

        security_item.issue_pe = to_float(resp_json['PEIssued'])
        security_item.price = to_float(resp_json['IssuePrice'])
        security_item.issues = to_float(resp_json['ShareIssued'])
        security_item.raising_fund = to_float(resp_json['NetCollection'])
        security_item.net_winning_rate = pct_to_float(resp_json['LotRateOn'])

        self.session.commit()
        self.logger.info('finish recording stock meta for:{}'.format(
            security_item.code))
        self.sleep()
def run(self):
    """Fetch and persist company profile and issuance info for every stock entity.

    Mirrors the single-quoted variant elsewhere in the project: two eastmoney
    endpoints per stock (profile, issuance), commit after each stock.
    """
    for security_item in self.entities:
        assert isinstance(security_item, StockDetail)

        if security_item.exchange == "sh":
            fc = "{}01".format(security_item.code)
        elif security_item.exchange == "sz":
            fc = "{}02".format(security_item.code)
        else:
            # fix: the original used two independent `if`s with no fallback,
            # leaving `fc` unbound (NameError) for any other exchange
            self.logger.warning("unknown exchange:{} for:{}".format(
                security_item.exchange, security_item.code))
            continue

        # 基本资料 (company profile)
        # TODO(review): "SecurityCode" is hard-coded to "SZ300059" for every
        # stock — looks like leftover debug data; confirm the api keys on
        # `fc` and ignores this field before removing it.
        param = {"color": "w", "fc": fc, "SecurityCode": "SZ300059"}
        resp = requests.post(
            "https://emh5.eastmoney.com/api/GongSiGaiKuang/GetJiBenZiLiao",
            json=param)
        resp.encoding = "utf8"

        resp_json = resp.json()["Result"]["JiBenZiLiao"]

        security_item.profile = resp_json["CompRofile"]
        security_item.main_business = resp_json["MainBusiness"]
        security_item.date_of_establishment = to_pd_timestamp(
            resp_json["FoundDate"])

        # 关联行业 (related industries)
        industries = ",".join(resp_json["Industry"].split("-"))
        security_item.industries = industries

        # 关联概念 (related concept indices)
        security_item.concept_indices = resp_json["Block"]
        # 关联地区 (related area indices)
        security_item.area_indices = resp_json["Provice"]

        self.sleep()

        # 发行相关 (issuance details)
        param = {"color": "w", "fc": fc}
        resp = requests.post(
            "https://emh5.eastmoney.com/api/GongSiGaiKuang/GetFaXingXiangGuan",
            json=param)
        resp.encoding = "utf8"

        resp_json = resp.json()["Result"]["FaXingXiangGuan"]

        security_item.issue_pe = to_float(resp_json["PEIssued"])
        security_item.price = to_float(resp_json["IssuePrice"])
        security_item.issues = to_float(resp_json["ShareIssued"])
        security_item.raising_fund = to_float(resp_json["NetCollection"])
        security_item.net_winning_rate = pct_to_float(resp_json["LotRateOn"])

        self.session.commit()
        self.logger.info("finish recording stock meta for:{}".format(
            security_item.code))
        self.sleep()
def get_recent_report(data_schema: Type[Mixin], timestamp, entity_id=None, filters=None, max_step=2):
    """Return the most recent non-empty report data at or before *timestamp*.

    Walks backwards up to *max_step* report dates (quarter ends) and returns
    the first query result that has data, or None if nothing is found.

    :param data_schema: schema with a ``report_date`` column
    :param timestamp: reference date to search back from
    :param entity_id: optional entity filter
    :param filters: optional extra sql filters (not mutated)
    :param max_step: how many report periods to look back
    """
    for step in range(max_step):
        report_date = get_recent_report_date(the_date=timestamp, step=step)
        # fix: the original rebound `filters` each iteration, accumulating a
        # new `report_date ==` equality on top of the previous one — after
        # step 0 the query carried contradictory equalities and was always
        # empty. Build a fresh filter list per step instead.
        the_filters = [data_schema.report_date == to_pd_timestamp(report_date)]
        if filters:
            the_filters = filters + the_filters
        df = data_schema.query_data(entity_id=entity_id, filters=the_filters)
        if pd_is_not_null(df):
            return df
    return None
def generate_domain(self, entity, original_data):
    """
    Generate (or load) the data_schema instance for one record result.

    :param entity: the entity the record belongs to
    :param original_data: raw record dict from the recorder, or an
                          already-built data_schema instance
    :return: (got_new_data, domain_item); domain_item is None when the
             record already exists and force_update is off
    """
    got_new_data = False

    # if the domain is directly generated in record method, we just return it
    if isinstance(original_data, self.data_schema):
        got_new_data = True
        return got_new_data, original_data

    the_id = self.generate_domain_id(entity, original_data)

    # optional way
    # item = self.session.query(self.data_schema).get(the_id)

    # look up an existing row with the same generated id
    items = get_data(data_schema=self.data_schema,
                     session=self.session,
                     provider=self.provider,
                     entity_id=entity.id,
                     filters=[self.data_schema.id == the_id],
                     return_type='domain')

    # already saved and not forcing a refresh -> skip this record
    if items and not self.force_update:
        self.logger.info('ignore the data {}:{} saved before'.format(
            self.data_schema, the_id))
        return got_new_data, None

    if not items:
        # brand-new record: parse its timestamp best-effort — a bad value is
        # logged and left as None rather than aborting the whole run
        timestamp_str = original_data[self.get_original_time_field()]
        timestamp = None
        try:
            timestamp = to_pd_timestamp(timestamp_str)
        except Exception as e:
            self.logger.exception(e)

        # only schemas that declare a 'name' column get the entity name
        if 'name' in get_schema_columns(self.data_schema):
            domain_item = self.data_schema(id=the_id,
                                           code=entity.code,
                                           name=entity.name,
                                           entity_id=entity.id,
                                           timestamp=timestamp)
        else:
            domain_item = self.data_schema(id=the_id,
                                           code=entity.code,
                                           entity_id=entity.id,
                                           timestamp=timestamp)
        got_new_data = True
    else:
        # existing row with force_update on: refresh it in place
        domain_item = items[0]

    # copy the mapped fields from the raw record onto the domain object
    fill_domain_from_dict(domain_item, original_data, self.get_data_map())
    return got_new_data, domain_item
def on_trading_close(self, timestamp):
    """Mark all open positions to the closing price at *timestamp*, refresh
    account totals, drop empty positions, and persist the account snapshot.

    Stocks are valued on qfq (forward-adjusted) close; other security types
    use the raw close.
    """
    self.logger.info('on_trading_close:{}'.format(timestamp))

    # recompute position value from scratch each close
    self.latest_account['value'] = 0
    self.latest_account['all_value'] = 0
    for position in self.latest_account['positions']:
        security_type, _, _ = decode_security_id(position['security_id'])
        data_schema = get_kdata_schema(security_type, level=self.level)

        # latest bar at or before the close timestamp
        kdata = get_kdata(provider=self.provider,
                          level=self.level,
                          security_id=position['security_id'],
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1)

        # use qfq for stock
        if security_type == SecurityType.stock:
            closing_price = kdata['qfq_close'][0]
        else:
            closing_price = kdata['close'][0]

        # the whole holding becomes tradable again after the close
        position['available_long'] = position['long_amount']
        position['available_short'] = position['short_amount']

        if closing_price:
            if (position['long_amount'] is not None) and position['long_amount'] > 0:
                position['value'] = position['long_amount'] * closing_price
                self.latest_account['value'] += position['value']
            elif (position['short_amount'] is not None) and position['short_amount'] > 0:
                # short value = 2 * entry value (margin) minus buy-back cost
                position['value'] = 2 * (position['short_amount'] * position['average_short_price'])
                position['value'] -= position['short_amount'] * closing_price
                self.latest_account['value'] += position['value']
        else:
            # no usable close price — keep the stale value and warn
            self.logger.warning(
                'could not refresh close value for position:{},timestamp:{}'
                .format(position['security_id'], timestamp))

    # remove the empty position
    self.latest_account['positions'] = [
        position for position in self.latest_account['positions']
        if position['long_amount'] > 0 or position['short_amount'] > 0
    ]

    self.latest_account['all_value'] = self.latest_account['value'] + self.latest_account['cash']
    self.latest_account['closing'] = True
    self.latest_account['timestamp'] = to_pd_timestamp(timestamp)

    self.logger.info('on_trading_close:{},latest_account:{}'.format(
        timestamp, self.latest_account))
    self.persist_account(timestamp)
def common_filter(query: Query, data_schema, start_timestamp=None, end_timestamp=None,
                  filters=None, order=None, limit=None):
    """Apply the shared time-range / filter / order / limit arguments to *query*.

    :param query: sqlalchemy query to refine
    :param data_schema: schema providing the ``timestamp`` column
    :param start_timestamp: inclusive lower bound on timestamp
    :param end_timestamp: inclusive upper bound on timestamp
    :param filters: extra sqlalchemy filter clauses
    :param order: explicit order_by clause; defaults to timestamp ascending
    :param limit: max row count
    :return: the refined query
    """
    if start_timestamp:
        query = query.filter(data_schema.timestamp >= to_pd_timestamp(start_timestamp))
    if end_timestamp:
        query = query.filter(data_schema.timestamp <= to_pd_timestamp(end_timestamp))
    if filters:
        # renamed from `filter`, which shadowed the builtin
        for the_filter in filters:
            query = query.filter(the_filter)
    if order is not None:
        query = query.order_by(order)
    else:
        query = query.order_by(data_schema.timestamp.asc())
    if limit:
        query = query.limit(limit)
    return query
def __init__(self,
             entity_ids=None,
             entity_type='stock',
             exchanges=['sh', 'sz'],
             codes=None,
             the_timestamp=None,
             start_timestamp=None,
             end_timestamp=None,
             long_threshold=0.8,
             short_threshold=-0.8,
             level=IntervalLevel.LEVEL_1DAY,
             provider='eastmoney') -> None:
    # NOTE(review): `exchanges=['sh', 'sz']` is a mutable default argument;
    # it is never mutated here, but `exchanges=None` + fallback is safer.
    self.entity_ids = entity_ids
    self.entity_type = entity_type
    self.exchanges = exchanges
    self.codes = codes
    self.provider = provider

    # a single `the_timestamp` pins both ends of the selection window;
    # otherwise an explicit [start_timestamp, end_timestamp] range is required
    if the_timestamp:
        self.the_timestamp = to_pd_timestamp(the_timestamp)
        self.start_timestamp = self.the_timestamp
        self.end_timestamp = self.the_timestamp
    elif start_timestamp and end_timestamp:
        self.start_timestamp = to_pd_timestamp(start_timestamp)
        self.end_timestamp = to_pd_timestamp(end_timestamp)
    else:
        # NOTE(review): assert is stripped under `python -O`; raising
        # ValueError would be a more robust guard for missing timestamps
        assert False

    self.long_threshold = long_threshold
    self.short_threshold = short_threshold
    self.level = level

    # factor pipelines and their cached results
    self.filter_factors: List[FilterFactor] = []
    self.score_factors: List[ScoreFactor] = []
    self.filter_result = None
    self.score_result = None

    # selection results, one frame per signal direction
    self.open_long_df: DataFrame = None
    self.open_short_df: DataFrame = None
    self.keep_long_df: DataFrame = None
    self.keep_short_df: DataFrame = None

    self.init_factors(entity_ids=entity_ids,
                      entity_type=entity_type,
                      exchanges=exchanges,
                      codes=codes,
                      the_timestamp=the_timestamp,
                      start_timestamp=start_timestamp,
                      end_timestamp=end_timestamp)
def get_etf_stocks(code=None, codes=None, ids=None, timestamp=None, provider=None):
    """Return the ETF's constituent stocks as of *timestamp*.

    Annual/half-year reports are complete on their own; a quarterly report is
    merged with older reports (walking back up to 20 periods) until a
    half-year or annual report is reached, keeping the newest row per stock.

    :param timestamp: as-of date; defaults to now (resolved per call)
    """
    # fix: the original default was `timestamp=now_pd_timestamp()`, which is
    # evaluated once at import time and frozen; resolve "now" per call.
    if timestamp is None:
        timestamp = now_pd_timestamp()

    latests: List[EtfStock] = EtfStock.query_data(
        provider=provider,
        code=code,
        end_timestamp=timestamp,
        order=EtfStock.timestamp.desc(),
        limit=1,
        return_type='domain')

    if latests:
        latest_record = latests[0]
        # fetch the newest report
        df = EtfStock.query_data(
            provider=provider, code=code, codes=codes, ids=ids,
            end_timestamp=timestamp,
            filters=[EtfStock.report_date == latest_record.report_date])

        # annual or half-year report: complete holdings, return directly
        if latest_record.report_period == ReportPeriod.year or latest_record.report_period == ReportPeriod.half_year:
            return df
        else:
            # quarterly report: merge with prior reports until an annual or
            # half-year report is included
            step = 0
            while True:
                report_date = get_recent_report_date(latest_record.report_date, step=step)
                pre_df = EtfStock.query_data(
                    provider=provider, code=code, codes=codes, ids=ids,
                    end_timestamp=timestamp,
                    filters=[
                        EtfStock.report_date == to_pd_timestamp(report_date)
                    ])
                # NOTE(review): DataFrame.append is removed in pandas>=2.0;
                # switch to pd.concat once the minimum pandas allows it
                df = df.append(pre_df)

                # stop once a half-year or annual report is merged in
                if (ReportPeriod.half_year.value in pre_df['report_period'].tolist()) or (
                        ReportPeriod.year.value in pre_df['report_period'].tolist()):
                    # keep the newest holding per stock
                    df = df.drop_duplicates(subset=['stock_code'], keep='first')
                    return df

                step = step + 1
                # safety bound: give up after 20 periods back
                if step >= 20:
                    break
def get_portfolio_stocks(portfolio_entity=Fund, code=None, codes=None, ids=None,
                         timestamp=None, provider=None):
    """Return a portfolio's (fund/etf) constituent stocks as of *timestamp*.

    Annual/half-year reports are complete on their own; a quarterly report is
    merged with older reports (walking back up to 20 periods) until a
    half-year or annual report is reached, keeping the newest row per stock.

    :param portfolio_entity: entity class, e.g. ``Fund``; its history schema
                             is resolved as ``f'{name}Stock'``
    :param timestamp: as-of date; defaults to now (resolved per call)
    """
    # fix: the original default was `timestamp=now_pd_timestamp()`, which is
    # evaluated once at import time and frozen; resolve "now" per call.
    if timestamp is None:
        timestamp = now_pd_timestamp()

    portfolio_stock = f'{portfolio_entity.__name__}Stock'
    data_schema: PortfolioStockHistory = get_schema_by_name(portfolio_stock)

    latests: List[PortfolioStockHistory] = data_schema.query_data(
        provider=provider,
        code=code,
        end_timestamp=timestamp,
        order=data_schema.timestamp.desc(),
        limit=1,
        return_type='domain')

    if latests:
        latest_record = latests[0]
        # fetch the newest report
        df = data_schema.query_data(
            provider=provider, code=code, codes=codes, ids=ids,
            end_timestamp=timestamp,
            filters=[data_schema.report_date == latest_record.report_date])

        # annual or half-year report: complete holdings, return directly
        if latest_record.report_period == ReportPeriod.year or latest_record.report_period == ReportPeriod.half_year:
            return df
        else:
            # quarterly report: merge with prior reports until an annual or
            # half-year report is included
            step = 0
            while step <= 20:
                report_date = get_recent_report_date(latest_record.report_date, step=step)
                pre_df = data_schema.query_data(
                    provider=provider, code=code, codes=codes, ids=ids,
                    end_timestamp=timestamp,
                    filters=[
                        data_schema.report_date == to_pd_timestamp(report_date)
                    ])
                # NOTE(review): DataFrame.append is removed in pandas>=2.0;
                # switch to pd.concat once the minimum pandas allows it
                df = df.append(pre_df)

                # stop once a half-year or annual report is merged in
                if (ReportPeriod.half_year.value in pre_df['report_period'].tolist()) or (
                        ReportPeriod.year.value in pre_df['report_period'].tolist()):
                    # keep the newest holding per stock
                    df = df.drop_duplicates(subset=['stock_code'], keep='first')
                    return df
                step = step + 1
def test_000778_rights_issue_detail():
    """Regression fixture: rights-issue records of 000778 up to 2018-09-30."""
    records = fundamental.get_rights_issue_detail(session=session,
                                                  provider=Provider.EASTMONEY,
                                                  return_type='domain',
                                                  codes=['000778'],
                                                  end_timestamp='2018-09-30',
                                                  order=RightsIssueDetail.timestamp.desc())
    assert len(records) == 2

    # newest record first (descending order)
    newest: RightsIssueDetail = records[0]
    assert newest.timestamp == to_pd_timestamp('2001-09-10')
    assert newest.rights_issues == 43570000
    assert newest.rights_raising_fund == 492300000
    assert newest.rights_issue_price == 11.3
def init_timestamps(self, entity_item) -> List[pd.Timestamp]:
    """Return the recording timestamps for *entity_item*.

    With ``record_history`` set, one timestamp per month-end from the listing
    date up to the start of the previous month; otherwise just that cutoff.
    """
    cutoff = pre_month_start_date()
    if not self.record_history:
        return [cutoff]
    # 每个月记录一次 — one record per month-end in the listing window
    monthly = pd.date_range(entity_item.list_date, cutoff, freq="M")
    return [to_pd_timestamp(d) for d in monthly]
def test_000778_spo_detial():
    """Regression fixture: SPO records of 000778 up to 2018-09-30."""
    records = fundamental.get_spo_detail(session=session,
                                         provider=Provider.EASTMONEY,
                                         return_type='domain',
                                         codes=['000778'],
                                         end_timestamp='2018-09-30',
                                         order=SPODetail.timestamp.desc())
    assert len(records) == 4

    # newest record first (descending order)
    newest: SPODetail = records[0]
    assert newest.timestamp == to_pd_timestamp('2017-04-01')
    assert newest.spo_issues == 347600000
    assert newest.spo_price == 5.15
    assert newest.spo_raising_fund == 1766000000
def update_account(self, security_id, new_position, timestamp):
    """Replace the position for *security_id* with *new_position*, stamp the
    account with *timestamp*, and persist it."""
    # keep every position except the one being replaced
    kept = []
    for pos in self.latest_account.positions:
        if pos.security_id != security_id:
            kept.append(pos)
    # append the refreshed position
    kept.append(new_position)

    self.latest_account.positions = kept
    self.latest_account.timestamp = to_pd_timestamp(timestamp)
    self.save_account(self.latest_account)
def __init__(self,
             security_list=None,
             security_type=SecurityType.stock,
             exchanges=['sh', 'sz'],
             codes=None,
             the_timestamp=None,
             start_timestamp=None,
             end_timestamp=None,
             threshold=0.8,
             level=TradingLevel.LEVEL_1DAY,
             provider='eastmoney') -> None:
    # NOTE(review): `exchanges=['sh', 'sz']` is a mutable default argument;
    # it is never mutated here, but `exchanges=None` + fallback is safer.
    self.security_list = security_list
    self.security_type = security_type
    self.exchanges = exchanges
    self.codes = codes
    self.provider = provider

    # a single `the_timestamp` pins both ends of the selection window;
    # otherwise an explicit [start_timestamp, end_timestamp] range is required
    if the_timestamp:
        self.the_timestamp = to_pd_timestamp(the_timestamp)
        self.start_timestamp = self.the_timestamp
        self.end_timestamp = self.the_timestamp
    elif start_timestamp and end_timestamp:
        self.start_timestamp = to_pd_timestamp(start_timestamp)
        self.end_timestamp = to_pd_timestamp(end_timestamp)
    else:
        # NOTE(review): assert is stripped under `python -O`; raising
        # ValueError would be a more robust guard for missing timestamps
        assert False

    self.threshold = threshold
    self.level = level

    # factor pipelines and their cached results
    self.must_factors: List[FilterFactor] = None
    self.score_factors: List[ScoreFactor] = None
    self.must_result = None
    self.score_result = None
    self.result_df: DataFrame = None

    self.init_factors(security_list=security_list,
                      security_type=security_type,
                      exchanges=exchanges,
                      codes=codes,
                      the_timestamp=the_timestamp,
                      start_timestamp=start_timestamp,
                      end_timestamp=end_timestamp)
def on_finish(self, security_item):
    """Backfill hfq (post-adjusted) prices from joinquant for bars that are
    missing them, then recompute qfq columns via a bulk UPDATE.

    If the latest adjust factor is unchanged, only rows with missing qfq
    values are updated; otherwise every row for the security is rewritten.
    """
    # bars since 2005 that still lack hfq_close
    kdatas = get_kdata(
        provider=self.provider,
        security_id=security_item.id,
        level=self.level.value,
        order=self.data_schema.timestamp.asc(),
        return_type='domain',
        session=self.session,
        filters=[
            self.data_schema.hfq_close.is_(None),
            self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')
        ])
    if kdatas:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # get hfq from joinquant
        df = get_price(to_jq_security_id(security_item),
                       start_date=to_time_str(start),
                       end_date=now_time_str(),
                       frequency='daily',
                       fields=['factor', 'open', 'close', 'low', 'high'],
                       skip_paused=True,
                       fq='post')
        if df is not None and not df.empty:
            # fill hfq data, matching bars to joinquant rows by date string
            for kdata in kdatas:
                time_str = to_time_str(kdata.timestamp)
                if time_str in df.index:
                    kdata.hfq_open = df.loc[time_str, 'open']
                    kdata.hfq_close = df.loc[time_str, 'close']
                    kdata.hfq_high = df.loc[time_str, 'high']
                    kdata.hfq_low = df.loc[time_str, 'low']
                    kdata.factor = df.loc[time_str, 'factor']
            self.session.commit()

            latest_factor = df.factor[-1]
            # factor not change yet, no need to reset the qfq past —
            # only fill rows where qfq columns are still NULL
            if latest_factor == self.current_factors.get(security_item.id):
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'security_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    self.data_schema.__table__, latest_factor, latest_factor, latest_factor, latest_factor,
                    security_item.id, self.level.value)
            else:
                # factor changed: recompute qfq for the whole history
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'security_id=\'{}\' and level=\'{}\''.format(self.data_schema.__table__, latest_factor,
                                                                   latest_factor, latest_factor, latest_factor,
                                                                   security_item.id, self.level.value)

            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()

    # TODO:use netease provider to get turnover_rate
    self.logger.info('use netease provider to get turnover_rate')
def common_filter(
    query: Query,
    data_schema,
    start_timestamp=None,
    end_timestamp=None,
    filters=None,
    order=None,
    limit=None,
    time_field="timestamp",
):
    """
    build filter by the arguments

    :param query: sql query
    :param data_schema: data schema
    :param start_timestamp: start timestamp
    :param end_timestamp: end timestamp
    :param filters: sql filters
    :param order: sql order
    :param limit: sql limit size
    :param time_field: time field in columns
    :return: result query
    """
    assert data_schema is not None
    # fix: use getattr for attribute lookup instead of eval(), which would
    # execute arbitrary code if time_field ever carried untrusted input
    time_col = getattr(data_schema, time_field)

    if start_timestamp:
        query = query.filter(time_col >= to_pd_timestamp(start_timestamp))
    if end_timestamp:
        query = query.filter(time_col <= to_pd_timestamp(end_timestamp))
    if filters:
        # renamed from `filter`, which shadowed the builtin
        for the_filter in filters:
            query = query.filter(the_filter)
    if order is not None:
        query = query.order_by(order)
    else:
        query = query.order_by(time_col.asc())
    if limit:
        query = query.limit(limit)
    return query
def to_jq_report_period(timestamp):
    """Convert a report date to joinquant's period notation.

    '2018' for the annual report, '2018q1'/'2018q2'/'2018q3' for the
    quarterly/half-year reports; returns None for non-report dates.
    """
    the_date = to_pd_timestamp(timestamp)
    report_period = to_report_period_type(timestamp)
    # fix: to_report_period_type returns ReportPeriod *values*, so the
    # comparisons must use `.value` too — comparing against the enum member
    # itself never matched, and this function always returned None
    if report_period == ReportPeriod.year.value:
        return '{}'.format(the_date.year)
    if report_period == ReportPeriod.season1.value:
        return '{}q1'.format(the_date.year)
    if report_period == ReportPeriod.half_year.value:
        return '{}q2'.format(the_date.year)
    if report_period == ReportPeriod.season3.value:
        return '{}q3'.format(the_date.year)
def test_000778_dividend_financing():
    """Regression fixture: dividend & financing records of 000778 up to 2018-09-30."""
    records = fundamental.get_dividend_financing(session=session,
                                                 provider=Provider.EASTMONEY,
                                                 return_type='domain',
                                                 codes=['000778'],
                                                 end_timestamp='2018-09-30',
                                                 order=DividendFinancing.timestamp.desc())
    assert len(records) == 22

    # index 1 — the 2017 annual record, not the very newest row
    item: DividendFinancing = records[1]
    assert item.timestamp == to_pd_timestamp('2017')
    assert item.dividend_money == 598632026.4
    assert item.spo_issues == 347572815.0
    assert item.rights_issues == 0
    assert item.ipo_issues == 0
def get_targets(self, timestamp, target_type: TargetType = TargetType.open_long) -> list:
    """Return the entity ids selected at *timestamp* for *target_type*.

    :param timestamp: the selection date
    :param target_type: which result frame to read (open_long / open_short)
    :return: list of entity ids; empty when nothing was selected
    """
    if target_type == TargetType.open_long:
        df = self.open_long_df
    elif target_type == TargetType.open_short:
        df = self.open_short_df
    else:
        # fix: the original left `df` unbound for any other TargetType,
        # raising NameError below; treat it as "no targets" instead
        df = None

    if pd_is_not_null(df):
        if timestamp in df.index:
            target_df = df.loc[[to_pd_timestamp(timestamp)], :]
            return target_df['entity_id'].tolist()
    return []
def init_timestamps(self, entity):
    """Fetch the list of report timestamps for *entity* from the eastmoney api."""
    payload = {"color": "w", "fc": get_fc(entity)}

    json_items = call_eastmoney_api(url=self.timestamps_fetching_url,
                                    path_fields=self.timestamp_list_path_fields,
                                    param=payload)

    # nothing configured or nothing returned -> no timestamps
    if not (self.timestamp_path_fields and json_items):
        return []

    # pull the timestamp out of each json item, then normalize to pd.Timestamp
    return [
        to_pd_timestamp(get_from_path_fields(item, self.timestamp_path_fields))
        for item in json_items
    ]
def init_main_index(provider='exchange'):
    """Persist the built-in CHINA_STOCK_MAIN_INDEX records into the Index table."""
    from zvt.utils.time_utils import to_pd_timestamp
    import pandas as pd
    from zvt.contract.api import df_to_db
    from zvt.domain import Index

    # normalize each record's timestamp in place before building the frame
    for record in CHINA_STOCK_MAIN_INDEX:
        record['timestamp'] = to_pd_timestamp(record['timestamp'])

    index_df = pd.DataFrame(CHINA_STOCK_MAIN_INDEX)
    df_to_db(df=index_df, data_schema=Index, provider=provider, force_update=False)
def on_finish_entity(self, entity):
    """Backfill the published timestamp for report rows where it still equals
    the report date, using already-fetched FinanceFactor rows when possible
    and falling back to a joinquant lookup otherwise."""
    # fill the timestamp for report published date:
    # rows where timestamp == report_date have never been corrected
    the_data_list = get_data(data_schema=self.data_schema,
                             provider=self.provider,
                             entity_id=entity.id,
                             order=self.data_schema.timestamp.asc(),
                             return_type='domain',
                             session=self.session,
                             filters=[self.data_schema.timestamp == self.data_schema.report_date,
                                      self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')])
    if the_data_list:
        if self.data_schema == FinanceFactor:
            # FinanceFactor itself has no local source to copy from — go to jq
            for the_data in the_data_list:
                self.fill_timestamp_with_jq(entity, the_data)
        else:
            # other schemas can reuse timestamps already corrected on
            # FinanceFactor rows covering the same report_date range
            df = get_finance_factors(entity_id=entity.id,
                                     columns=[FinanceFactor.timestamp,
                                              FinanceFactor.report_date,
                                              FinanceFactor.id],
                                     filters=[FinanceFactor.timestamp != FinanceFactor.report_date,
                                              FinanceFactor.timestamp >= to_pd_timestamp('2005-01-01'),
                                              FinanceFactor.report_date >= the_data_list[0].report_date,
                                              FinanceFactor.report_date <= the_data_list[-1].report_date, ])

            if df_is_not_null(df):
                # index by report_date for the per-row lookups below
                index_df(df, index='report_date')

            for the_data in the_data_list:
                if (df is not None) and (not df.empty) and the_data.report_date in df.index:
                    # copy the corrected timestamp from the FinanceFactor row
                    the_data.timestamp = df.at[the_data.report_date, 'timestamp']
                    self.logger.info(
                        'db fill {} {} timestamp:{} for report_date:{}'.format(self.data_schema,
                                                                               entity.id,
                                                                               the_data.timestamp,
                                                                               the_data.report_date))
                    self.session.commit()
                else:
                    # no local match — fall back to the joinquant lookup
                    self.fill_timestamp_with_jq(entity, the_data)
def on_trading_close(self, timestamp):
    """Mark all open positions to the daily closing price at *timestamp*,
    refresh account totals, drop empty positions, and persist the snapshot."""
    self.logger.debug('on_trading_close:{}'.format(timestamp))

    # recompute position value from scratch each close
    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY,
                                       adjust_type=self.adjust_type)

        # latest daily bar at or before the close timestamp
        kdata = get_kdata(provider=self.provider,
                          level=IntervalLevel.LEVEL_1DAY,
                          entity_id=position.entity_id,
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1,
                          adjust_type=self.adjust_type)

        closing_price = kdata['close'][0]

        # the whole holding becomes tradable again after the close
        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                # short value = 2 * entry value (margin) minus buy-back cost
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value
        else:
            # fix: positions are domain objects here — the original used
            # position['entity_id'], which raises TypeError when this
            # warning path actually fires
            self.logger.warning(
                'could not refresh close value for position:{},timestamp:{}'
                .format(position.entity_id, timestamp))

    # remove the empty position
    self.account.positions = [
        position for position in self.account.positions
        if position.long_amount > 0 or position.short_amount > 0
    ]

    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)

    self.logger.debug('on_trading_close:{},latest_account:{}'.format(
        timestamp, self.account))
    self.persist_account(timestamp)
def to_report_period_type(report_date):
    """Map a quarter-end report date to its ReportPeriod value.

    Returns None when *report_date* is not one of the four standard
    quarter-end dates.
    """
    the_date = to_pd_timestamp(report_date)
    # dispatch on (month, day) instead of an if-chain
    period_by_month_day = {
        (3, 31): ReportPeriod.season1.value,
        (6, 30): ReportPeriod.half_year.value,
        (9, 30): ReportPeriod.season3.value,
        (12, 31): ReportPeriod.year.value,
    }
    return period_by_month_day.get((the_date.month, the_date.day))
def download_stock_list(self, response, exchange):
    """Parse the exchange's raw stock-list download (sh: csv, sz: xlsx) and
    persist it to the stock table and StockDetail.

    :param response: HTTP response whose ``content`` is the raw listing file
    :param exchange: 'sh' or 'sz'
    """
    df = None
    if exchange == "sh":
        df = pd.read_csv(
            io.BytesIO(response.content),
            # fix: raw string — "\s" is an invalid escape in a plain literal
            sep=r"\s+",
            encoding="GB2312",
            dtype=str,
            parse_dates=["上市日期"],
            # NOTE(review): error_bad_lines is deprecated since pandas 1.3;
            # switch to on_bad_lines="skip" once the minimum pandas allows it
            error_bad_lines=False,
        )
        if df is not None:
            df = df.loc[:, ["公司代码", "公司简称", "上市日期"]]

    elif exchange == "sz":
        df = pd.read_excel(io.BytesIO(response.content),
                           sheet_name="A股列表",
                           dtype=str,
                           parse_dates=["A股上市日期"])
        if df is not None:
            df = df.loc[:, ["A股代码", "A股简称", "A股上市日期"]]

    if df is not None:
        df.columns = ["code", "name", "list_date"]
        df = df.dropna(subset=["code"])

        # handle the dirty data
        # 600996,贵广网络,2016-12-26,2016-12-26,sh,stock,stock_sh_600996,,次新股,贵州,,
        df.loc[df["code"] == "600996", "list_date"] = "2016-12-26"
        # debug leftover: surfaces rows whose list_date is the placeholder "-"
        print(df[df["list_date"] == "-"])

        df["list_date"] = df["list_date"].apply(
            lambda x: to_pd_timestamp(x))
        df["exchange"] = exchange
        df["entity_type"] = "stock"
        # id/entity_id like "stock_sh_600996"
        df["id"] = df[["entity_type", "exchange", "code"]].apply(lambda x: "_".join(x.astype(str)), axis=1)
        df["entity_id"] = df["id"]
        df["timestamp"] = df["list_date"]
        df = df.dropna(axis=0, how="any")
        df = df.drop_duplicates(subset=("id"), keep="last")
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=False)
        # persist StockDetail too
        df_to_db(df=df, data_schema=StockDetail, provider=self.provider, force_update=False)
        self.logger.info(df.tail())
    self.logger.info("persist stock list successs")
def __init__(self,
             security_type=SecurityType.stock,
             exchanges=['sh', 'sz'],
             codes=None,
             start_timestamp=None,
             end_timestamp=None) -> None:
    # NOTE(review): `exchanges=['sh', 'sz']` is a mutable default argument;
    # it is never mutated here, but `exchanges=None` + fallback is safer.
    # trader name derives from the concrete subclass name
    self.trader_name = type(self).__name__.lower()

    # signal plumbing
    self.trading_signal_queue = queue.Queue()
    self.trading_signal_listeners = []
    self.state_listeners = []

    # populated by init_selectors below
    self.selectors: List[TargetSelector] = None

    self.security_type = security_type
    self.exchanges = exchanges
    self.codes = codes
    self.start_timestamp = start_timestamp
    self.end_timestamp = end_timestamp

    # normalize the window: missing start falls back to "now"
    if self.start_timestamp:
        self.start_timestamp = to_pd_timestamp(self.start_timestamp)
    else:
        self.start_timestamp = now_pd_timestamp()
    self.current_timestamp = self.start_timestamp

    # end may stay None (open-ended / live trading)
    if self.end_timestamp:
        self.end_timestamp = to_pd_timestamp(self.end_timestamp)

    # simulated account tracks the signals this trader emits
    self.account_service = SimAccountService(
        trader_name=self.trader_name, timestamp=self.start_timestamp)

    self.add_trading_signal_listener(self.account_service)

    self.init_selectors(security_type=self.security_type,
                        exchanges=self.exchanges,
                        codes=self.codes,
                        start_timestamp=self.start_timestamp,
                        end_timestamp=self.end_timestamp)
def to_jq_report_period(timestamp):
    """Convert a report date to joinquant's period notation.

    '2018' for the annual report, '2018q1'/'2018q2'/'2018q3' otherwise;
    asserts on dates that are not standard report dates.
    """
    the_date = to_pd_timestamp(timestamp)
    period = to_report_period_type(timestamp)

    # map each period value to the year suffix jq expects
    suffix_by_period = {
        ReportPeriod.year.value: "",
        ReportPeriod.season1.value: "q1",
        ReportPeriod.half_year.value: "q2",
        ReportPeriod.season3.value: "q3",
    }
    # unknown period -> AssertionError, same as the original's `assert False`
    assert period in suffix_by_period
    return "{}{}".format(the_date.year, suffix_by_period[period])
def risky_company(the_date=None, income_yoy=-0.1, profit_yoy=-0.1, entity_ids=None):
    """Return codes of companies that look financially risky near *the_date*.

    A company is flagged when, within the ~130 days before *the_date*, any of:
    revenue/profit shrinking with weak liquidity ratios; receivables +
    inventories + goodwill exceeding total equity; or receivables above half
    of net profit.

    :param the_date: reference date; defaults to now (resolved per call)
    :param income_yoy: revenue-growth threshold (yoy)
    :param profit_yoy: profit-growth threshold (yoy)
    :param entity_ids: optional restriction of the universe
    """
    # fix: the original default was `to_pd_timestamp(now_time_str())`,
    # evaluated once at import time and frozen; resolve "now" per call.
    if the_date is None:
        the_date = to_pd_timestamp(now_time_str())

    codes = []
    start_timestamp = to_pd_timestamp(the_date) - datetime.timedelta(130)

    # 营收降,利润降,流动比率低,速动比率低
    # (revenue down, profit down, low current/quick ratios)
    finance_filter = or_(FinanceFactor.op_income_growth_yoy < income_yoy,
                         FinanceFactor.net_profit_growth_yoy <= profit_yoy,
                         FinanceFactor.current_ratio < 0.7,
                         FinanceFactor.quick_ratio < 0.5)
    df = FinanceFactor.query_data(entity_ids=entity_ids,
                                  start_timestamp=start_timestamp,
                                  filters=[finance_filter],
                                  columns=['code'])
    if pd_is_not_null(df):
        codes = codes + df.code.tolist()

    # 高应收,高存货,高商誉 (high receivables/inventories/goodwill vs equity)
    balance_filter = (BalanceSheet.accounts_receivable + BalanceSheet.inventories + BalanceSheet.goodwill) \
                     > BalanceSheet.total_equity
    df = BalanceSheet.query_data(entity_ids=entity_ids,
                                 start_timestamp=start_timestamp,
                                 filters=[balance_filter],
                                 columns=['code'])
    if pd_is_not_null(df):
        codes = codes + df.code.tolist()

    # 应收>利润*1/2 (receivables exceeding half of net profit)
    df1 = BalanceSheet.query_data(entity_ids=entity_ids,
                                  start_timestamp=start_timestamp,
                                  columns=[BalanceSheet.code,
                                           BalanceSheet.accounts_receivable])
    if pd_is_not_null(df1):
        df1.drop_duplicates(subset='code', keep='last', inplace=True)
        df1 = df1.set_index('code', drop=True).sort_index()

    df2 = IncomeStatement.query_data(entity_ids=entity_ids,
                                     start_timestamp=start_timestamp,
                                     columns=[IncomeStatement.code,
                                              IncomeStatement.net_profit])
    if pd_is_not_null(df2):
        df2.drop_duplicates(subset='code', keep='last', inplace=True)
        df2 = df2.set_index('code', drop=True).sort_index()

    if pd_is_not_null(df1) and pd_is_not_null(df2):
        codes = codes + df1[df1.accounts_receivable > df2.net_profit / 2].index.tolist()

    return list(set(codes))
def test_000778_spo_detial():
    """Regression fixture: SPO records of 000778 up to 2018-09-30."""
    records = SpoDetail.query_data(session=session,
                                   provider='eastmoney',
                                   return_type='domain',
                                   codes=['000778'],
                                   end_timestamp='2018-09-30',
                                   order=SpoDetail.timestamp.desc())
    assert len(records) == 4

    # newest record first (descending order)
    newest: SpoDetail = records[0]
    assert newest.timestamp == to_pd_timestamp('2017-04-01')
    assert newest.spo_issues == 347600000
    assert newest.spo_price == 5.15
    assert newest.spo_raising_fund == 1766000000
def __init__(self, security_id, trading_level, timestamp, trader_name, history_size=250) -> None:
    self.security_id = security_id
    self.trading_level = trading_level
    # snap the starting point onto the trading level's time grid
    self.current_timestamp = trading_level.floor_timestamp(to_pd_timestamp(timestamp))
    # model name like "<trader>_<ModelClass>_<level>"
    self.model_name = "{}_{}_{}".format(trader_name, type(self).__name__, trading_level.value)
    # how many historical bars this model keeps around
    self.history_size = history_size

    # simulated account tracks the signals this model emits
    self.add_trading_signal_listener(SimAccountService(trader_name=trader_name,
                                                       model_name=self.model_name,
                                                       timestamp=timestamp))

    # market close time for this security (hour, minute)
    self.close_hour, self.close_minute = get_close_time(self.security_id)