def record(self, entity_item, start, end, size, timestamps):
    """Fetch github users registered at timestamps[0] and persist them.

    Returns the last raw item (as a one-element list) so the caller can
    resume from it on the next invocation.
    """
    # rotate the token seed on every call (presumably to spread rate limits
    # across accounts — confirm against GithubAccount.get_token)
    self.seed += 1
    timestamp = timestamps[0]
    the_url = self.url.format(to_time_str(timestamp), to_time_str(timestamp))
    items = get_all_results(url=the_url, token=GithubAccount.get_token(seed=self.seed))
    current_time = now_pd_timestamp()
    results = [{
        'id': f'user_github_{item["login"]}',
        'entity_id': f'user_github_{item["login"]}',
        'timestamp': timestamp,
        'exchange': 'github',
        'entity_type': 'user',
        'code': item['login'],
        'node_id': item['node_id'],
        'created_timestamp': current_time,
        'updated_timestamp': None
    } for item in items]
    # save all but the last item directly (faster than the generic path);
    # the last item is handed back to the caller instead
    df = pd.DataFrame(data=results[:-1])
    df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force=True)
    return results[-1:]
def generate_kdata_id(entity_id, timestamp, level):
    """Build the kdata record id ``<entity_id>_<time_str>``.

    Daily-or-coarser levels only keep the date part; finer levels keep the
    full ISO-8601 time so intraday bars get distinct ids.
    """
    fmt = TIME_FORMAT_DAY if level >= IntervalLevel.LEVEL_1DAY else TIME_FORMAT_ISO8601
    return f"{entity_id}_{to_time_str(timestamp, fmt=fmt)}"
def fetch_cumulative_net_value(self, security_item, start, end) -> pd.DataFrame:
    """Page through eastmoney's fund net-value (lsjz) API and return the full
    history as a DataFrame indexed by the FSRQ (net-value date) column."""
    query_url = 'http://api.fund.eastmoney.com/f10/lsjz?' \
                'fundCode={}&pageIndex={}&pageSize=200&startDate={}&endDate={}'
    page = 1
    df = pd.DataFrame()
    while True:
        url = query_url.format(security_item.code, page, to_time_str(start), to_time_str(end))
        response = requests.get(url, headers=EASTMONEY_ETF_NET_VALUE_HEADER)
        response_json = demjson.decode(response.text)
        response_df = pd.DataFrame(response_json['Data']['LSJZList'])
        # empty page means the last page has been passed
        if response_df.empty:
            break
        response_df['FSRQ'] = pd.to_datetime(response_df['FSRQ'])
        response_df['JZZZL'] = pd.to_numeric(response_df['JZZZL'], errors='coerce')
        response_df['LJJZ'] = pd.to_numeric(response_df['LJJZ'], errors='coerce')
        response_df = response_df.fillna(0)
        response_df.set_index('FSRQ', inplace=True, drop=True)
        df = pd.concat([df, response_df])
        page += 1
        # throttle between pages
        self.sleep()
    return df
def record(self, entity, start, end, size, timestamps):
    """Record raw (unadjusted) kdata from joinquant's get_price.

    Drops the latest bar when the market is still trading (it is unfinished).
    :return: list of record dicts, empty when nothing was fetched
    """
    if self.start_timestamp:
        start = max(self.start_timestamp, to_pd_timestamp(start))
    end = now_pd_timestamp() + timedelta(days=1)
    start_timestamp = to_time_str(start)
    end_timestamp = to_time_str(end)
    # no price adjustment (不复权)
    df = get_price(to_jq_entity_id(entity), start_date=to_time_str(start_timestamp),
                   end_date=end_timestamp,
                   frequency=self.jq_trading_level,
                   fields=['open', 'close', 'low', 'high', 'volume', 'money'],
                   skip_paused=True, fq=None)
    # FIX: guard the empty frame — df.iloc[-1] below raised IndexError when
    # joinquant returned no rows (e.g. suspended stock, empty range)
    if not df_is_not_null(df):
        return []
    df.index.name = 'timestamp'
    df.reset_index(inplace=True)
    df['name'] = entity.name
    df.rename(columns={'money': 'turnover'}, inplace=True)
    df['timestamp'] = pd.to_datetime(df['timestamp'])
    df['provider'] = 'joinquant'
    df['level'] = self.level.value
    # remove the unfinished kdata
    if is_in_trading(entity_type='stock', exchange='sh', timestamp=df.iloc[-1, :]['timestamp']):
        df = df.iloc[:-1, :]
    return df.to_dict(orient='records')
def generate_kdata_id(se):
    # Build the kdata id "<entity_id>_<time_str>" from a row/Series `se`.
    # NOTE(review): this references `self.level` but takes no `self` — it looks
    # like a closure lifted out of an enclosing method; confirm `self` is in
    # scope where this is actually defined.
    if self.level >= IntervalLevel.LEVEL_1DAY:
        # daily or coarser: date-only id
        return "{}_{}".format(
            se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY))
    else:
        # intraday: full ISO-8601 timestamp keeps ids unique per bar
        return "{}_{}".format(
            se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601))
def on_finish_entity(self, entity):
    """Backfill hfq (post-adjusted) prices and adjust factors for kdata rows
    that are missing them, then recompute qfq (pre-adjusted) columns in bulk.
    """
    # rows since 2005 that still lack hfq data
    kdatas = get_kdata(
        provider=self.provider,
        entity_id=entity.id,
        level=self.level.value,
        order=self.data_schema.timestamp.asc(),
        return_type='domain',
        session=self.session,
        filters=[
            self.data_schema.hfq_close.is_(None),
            self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')
        ])
    if kdatas:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp
        # get hfq from joinquant
        df = get_price(to_jq_entity_id(entity), start_date=to_time_str(start),
                       end_date=now_time_str(), frequency='daily',
                       fields=['factor', 'open', 'close', 'low', 'high'],
                       skip_paused=True, fq='post')
        if df_is_not_null(df):
            # fill hfq data row by row, matching on the date string index
            for kdata in kdatas:
                time_str = to_time_str(kdata.timestamp)
                if time_str in df.index:
                    kdata.hfq_open = df.loc[time_str, 'open']
                    kdata.hfq_close = df.loc[time_str, 'close']
                    kdata.hfq_high = df.loc[time_str, 'high']
                    kdata.hfq_low = df.loc[time_str, 'low']
                    kdata.factor = df.loc[time_str, 'factor']
            self.session.add_all(kdatas)
            self.session.commit()
            # NOTE(review): positional `df.factor[-1]` — pandas treats -1 as a
            # label here unless the index is integer-positional; confirm intent.
            latest_factor = df.factor[-1]
            # factor not change yet, no need to reset the qfq past:
            # only fill rows whose qfq columns are still NULL
            if latest_factor == self.current_factors.get(entity.id):
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    self.data_schema.__table__, latest_factor, latest_factor, latest_factor,
                    latest_factor, entity.id, self.level.value)
            else:
                # factor changed: every stored qfq value is stale, recompute all
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\''.format(self.data_schema.__table__,
                                                                 latest_factor, latest_factor,
                                                                 latest_factor, latest_factor,
                                                                 entity.id, self.level.value)
            # NOTE(review): SQL built via str.format — values are internal
            # (factor float, entity id, level), but parameterized execution
            # would be safer if any of these ever comes from outside.
            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()
def record(self, entity, start, end, size, timestamps):
    """Record qfq (pre-adjusted) kdata from joinquant's get_bars and detect
    adjust-factor changes against previously stored data."""
    # only pre-adjusted (前复权) data is requested
    if not self.end_timestamp:
        df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                      fq_ref_date=to_time_str(now_pd_timestamp()),
                      include_now=True)
    else:
        end_timestamp = to_time_str(self.end_timestamp)
        df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                      end_dt=end_timestamp,
                      fq_ref_date=to_time_str(now_pd_timestamp()),
                      include_now=False)
    if pd_is_not_null(df):
        df['name'] = entity.name
        df.rename(columns={'money': 'turnover', 'date': 'timestamp'}, inplace=True)
        df['entity_id'] = entity.id
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['provider'] = 'joinquant'
        df['level'] = self.level.value
        df['code'] = entity.code
        # decide whether previously saved qfq data must be recomputed:
        # compare the earliest newly-fetched close with the stored close
        # for the same bar
        check_df = df.head(1)
        check_date = check_df['timestamp'][0]
        current_df = get_kdata(entity_id=entity.id, provider=self.provider,
                               start_timestamp=check_date, end_timestamp=check_date,
                               limit=1, level=self.level)
        if pd_is_not_null(current_df):
            old = current_df.iloc[0, :]['close']
            new = check_df['close'][0]
            # a different close for the same time means the adjust factor
            # changed, so past qfq data needs recomputing
            if round(old, 2) != round(new, 2):
                self.factor = new / old
                self.last_timestamp = pd.Timestamp(check_date)

        def generate_kdata_id(se):
            # id is "<entity_id>_<date>" for daily+, full ISO time intraday
            if self.level >= IntervalLevel.LEVEL_1DAY:
                return "{}_{}".format(se['entity_id'],
                                      to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY))
            else:
                return "{}_{}".format(se['entity_id'],
                                      to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601))

        df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1)
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
                 force_update=self.force_update)
    return None
def record(self, entity, start, end, size, timestamps):
    """Record raw (unadjusted) kdata from joinquant's get_price and persist it."""
    if self.start_timestamp:
        start = max(self.start_timestamp, to_pd_timestamp(start))
    # if self.level < IntervalLevel.LEVEL_1HOUR:
    #     start = '2019-01-01'
    end = now_pd_timestamp()
    start_timestamp = to_time_str(start)
    # joinquant's get_price requires an explicit end time, otherwise it can
    # return unfinished (future) bars
    end_timestamp = to_time_str(end, fmt=TIME_FORMAT_MINUTE2)
    # no price adjustment (不复权)
    df = get_price(
        to_jq_entity_id(entity),
        start_date=to_time_str(start_timestamp),
        end_date=end_timestamp,
        frequency=self.jq_trading_level,
        fields=['open', 'close', 'low', 'high', 'volume', 'money'],
        skip_paused=True, fq=None)
    if df_is_not_null(df):
        df.index.name = 'timestamp'
        df.reset_index(inplace=True)
        df['name'] = entity.name
        df.rename(columns={'money': 'turnover'}, inplace=True)
        df['entity_id'] = entity.id
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['provider'] = 'joinquant'
        df['level'] = self.level.value
        df['code'] = entity.code

        def generate_kdata_id(se):
            # id is "<entity_id>_<date>" for daily+, full ISO time intraday
            if self.level >= IntervalLevel.LEVEL_1DAY:
                return "{}_{}".format(
                    se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY))
            else:
                return "{}_{}".format(
                    se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601))

        df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1)
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
                 force=self.force_update)
    return None
def record(self, entity, start, end, size, timestamps):
    """Fetch OHLCV kdata for a crypto pair via ccxt.

    Returns a list of kdata dicts, or implicitly None when the exchange does
    not support fetchOHLCV.
    """
    if self.start_timestamp:
        start = max(self.start_timestamp, to_pd_timestamp(start))
    start_timestamp = to_time_str(start)
    ccxt_exchange = CCXTAccount.get_ccxt_exchange(entity.exchange)
    if ccxt_exchange.has['fetchOHLCV']:
        limit = CCXTAccount.get_kdata_limit(entity.exchange)
        limit = min(size, limit)
        kdata_list = []
        if CCXTAccount.exchange_conf[entity.exchange]['support_since']:
            # NOTE(review): ccxt documents `since` as an epoch in milliseconds,
            # but a time *string* is passed here — confirm the exchanges in use
            # tolerate this.
            kdatas = ccxt_exchange.fetch_ohlcv(
                entity.code, timeframe=self.ccxt_trading_level, since=start_timestamp)
        else:
            kdatas = ccxt_exchange.fetch_ohlcv(
                entity.code, timeframe=self.ccxt_trading_level, limit=limit)
        # always ignore the latest one, because it's not finished
        for kdata in kdatas[0:-1]:
            current_timestamp = kdata[0]
            # daily level: normalize to a date string so bars align on days
            if self.level == IntervalLevel.LEVEL_1DAY:
                current_timestamp = to_time_str(current_timestamp)
            # ccxt OHLCV row layout: [ts, open, high, low, close, volume]
            kdata_json = {
                'timestamp': to_pd_timestamp(current_timestamp),
                'open': kdata[1],
                'high': kdata[2],
                'low': kdata[3],
                'close': kdata[4],
                'volume': kdata[5],
                'name': entity.name,
                'provider': 'ccxt',
                'level': self.level.value
            }
            kdata_list.append(kdata_json)
        return kdata_list
    else:
        self.logger.warning("exchange:{} not support fetchOHLCV".format(
            entity.exchange))
def test_china_stock_reader():
    """DataReader smoke test: load two SZ stocks for 2019-01-01..2019-06-10,
    then move_on() day by day through 2019-06-11..14 and draw the results."""
    data_reader = DataReader(codes=['002572', '000338'], data_schema=Stock1dKdata,
                             provider='joinquant', start_timestamp='2019-01-01',
                             end_timestamp='2019-06-10')
    categories = data_reader.get_categories()
    df = data_reader.get_data_df()
    assert 'stock_sz_002572' in categories
    assert 'stock_sz_000338' in categories
    # boundary dates of the initial load must be present
    assert ('stock_sz_002572', '2019-01-02') in df.index
    assert ('stock_sz_000338', '2019-01-02') in df.index
    assert ('stock_sz_002572', '2019-06-10') in df.index
    assert ('stock_sz_000338', '2019-06-10') in df.index
    for timestamp in iterate_timestamps(entity_type='stock', exchange='sz',
                                        level=IntervalLevel.LEVEL_1DAY,
                                        start_timestamp='2019-06-11',
                                        end_timestamp='2019-06-14'):
        # timeout=0: don't wait for new data, just advance the reader
        data_reader.move_on(to_timestamp=timestamp, timeout=0)
        df = data_reader.get_data_df()
        assert ('stock_sz_002572', timestamp) in df.index
        assert ('stock_sz_000338', to_time_str(timestamp)) in df.index
    data_reader.data_drawer().draw_table()
    data_reader.data_drawer().draw_kline()
def select_by_finance(timestamp=None, entity_ids=None):
    """Select long targets by fundamental-quality factors evaluated up to `timestamp`.

    :param timestamp: evaluation date; defaults to *now at call time*.
        FIX: the default used to be ``now_pd_timestamp()`` in the signature,
        which Python evaluates only once at import time, freezing the date.
    :param entity_ids: optional restriction of the stock universe
    :return: list of selected entity ids
    """
    if timestamp is None:
        timestamp = now_pd_timestamp()
    # NOTE(review): the weekend branch only logs and then carries on — confirm
    # whether an early return was intended here.
    if timestamp.dayofweek in (5, 6):
        logger.info(f'today:{timestamp} is {timestamp.day_name()},just ignore')
    today = to_time_str(timestamp)
    my_selector = TargetSelector(start_timestamp='2015-01-01', end_timestamp=today,
                                 entity_ids=entity_ids)
    # add the factors: general quality + positive operating cash flow >= 1e8
    good_factor1 = GoodCompanyFactor(start_timestamp='2015-01-01', end_timestamp=today,
                                     entity_ids=entity_ids)
    good_factor2 = GoodCompanyFactor(
        start_timestamp='2015-01-01',
        end_timestamp=today,
        entity_ids=entity_ids,
        data_schema=CashFlowStatement,
        columns=[
            CashFlowStatement.report_period,
            CashFlowStatement.net_op_cash_flows
        ],
        filters=[CashFlowStatement.net_op_cash_flows > 0],
        col_threshold={'net_op_cash_flows': 100000000})
    my_selector.add_filter_factor(good_factor1)
    my_selector.add_filter_factor(good_factor2)
    my_selector.run()
    long_targets = my_selector.get_open_long_targets(today)
    logger.info(f'selected:{len(long_targets)}')
    return long_targets
def generate_request_param(self, security_item, start, end, size, timestamp):
    """Build the eastmoney query payload for a single report date (BaoGaoQi)."""
    param = {
        "color": "w",
        "fc": get_fc(security_item),
        "BaoGaoQi": to_time_str(timestamp)
    }
    return param
def record(self, entity, start, end, size, timestamps):
    """Record daily valuation data (pe/pb/ps/pcf, caps) from joinquant.

    Unit conversion: market caps arrive in 亿 (1e8 CNY), share counts in
    万 (1e4 shares).
    """
    q = query(valuation).filter(valuation.code == to_jq_entity_id(entity))
    count: pd.Timedelta = now_pd_timestamp() - start
    df = get_fundamentals_continuously(q, end_date=now_time_str(), count=count.days + 1,
                                       panel=False)
    df['entity_id'] = entity.id
    df['timestamp'] = pd.to_datetime(df['day'])
    df['code'] = entity.code
    df['name'] = entity.name
    df['id'] = df['timestamp'].apply(
        lambda x: "{}_{}".format(entity.id, to_time_str(x)))
    df = df.rename(
        {
            'pe_ratio_lyr': 'pe',
            'pe_ratio': 'pe_ttm',
            'pb_ratio': 'pb',
            'ps_ratio': 'ps',
            'pcf_ratio': 'pcf'
        },
        axis='columns')
    df['market_cap'] = df['market_cap'] * 100000000
    # FIX: this line used to scale `circulating_cap` by 1e8 and then the last
    # line scaled it again by 1e4 (double conversion); the 1e8 factor belongs
    # to the circulating *market* cap column.
    df['circulating_market_cap'] = df['circulating_market_cap'] * 100000000
    df['capitalization'] = df['capitalization'] * 10000
    df['circulating_cap'] = df['circulating_cap'] * 10000
    df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
             force_update=self.force_update)
    return None
def record(self, entity, start, end, size, timestamps):
    """Record exchange-wide trade summary rows from joinquant's
    STK_EXCHANGE_TRADE_INFO, converting 亿/万 units to absolute numbers."""
    jq_code = code_map_jq.get(entity.code)
    q = query(finance.STK_EXCHANGE_TRADE_INFO).filter(
        finance.STK_EXCHANGE_TRADE_INFO.exchange_code == jq_code,
        finance.STK_EXCHANGE_TRADE_INFO.date >= to_time_str(start)).limit(2000)
    df = finance.run_query(q)
    # FIX: removed leftover debug `print(df)`
    json_results = []
    for item in df.to_dict(orient='records'):
        result = {
            'provider': self.provider,
            'timestamp': item['date'],
            'name': entity.name,
            'pe': item['pe_average'],
            'total_value': multiple_number(item['total_market_cap'], 100000000),
            # NOTE(review): key spelling 'vaule' kept as-is — presumably it
            # matches the schema column name; verify before correcting.
            'total_tradable_vaule': multiple_number(item['circulating_market_cap'], 100000000),
            'volume': multiple_number(item['volume'], 10000),
            'turnover': multiple_number(item['money'], 100000000),
            'turnover_rate': item['turnover_ratio']
        }
        json_results.append(result)
    # a short page means the source is exhausted
    if len(json_results) < 100:
        self.one_shot = True
    return json_results
def generate_request_param(self, security_item, start, end, size, timestamps):
    """Build the eastmoney finance-report request payload.

    Short timestamp lists fetch `size` latest reports in one shot; longer
    lists page backwards ten reports at a time, anchored at timestamps[10].
    """
    param = {
        "color": "w",
        "fc": get_fc(security_item),
        "corpType": company_type_flag(security_item),
        # 0 means get all types
        "reportDateType": 0,
    }
    if len(timestamps) <= 10:
        param["endDate"] = ''
        param["latestCount"] = size
    else:
        param["endDate"] = to_time_str(timestamps[10])
        param["latestCount"] = 10
    # income and cash-flow statements additionally need reportType=1
    if self.finance_report_type in ('LiRunBiaoList', 'XianJinLiuLiangBiaoList'):
        param['reportType'] = 1
    return param
def on_trading_open(self, timestamp):
    """Restore the most recent persisted account state at the start of a
    trading day, rebuilding `self.latest_account` including positions."""
    self.logger.info('on_trading_open:{}'.format(timestamp))
    # nothing to restore on the very first day of the backtest
    if is_same_date(timestamp, self.start_timestamp):
        return
    # get the account for trading at the date
    accounts = get_account(session=self.session,
                           trader_name=self.trader_name,
                           return_type='domain',
                           end_timestamp=to_time_str(timestamp),
                           limit=1,
                           order=SimAccount.timestamp.desc())
    if accounts:
        account = accounts[0]
    else:
        return
    positions = []
    # FIXME:dump all directly
    for position_domain in account.positions:
        # NOTE(review): `.data` implies marshmallow < 3 dump results — confirm
        # the pinned library version before upgrading.
        position_dict = position_schema.dump(position_domain).data
        self.logger.info('current position:{}'.format(position_dict))
        # drop the back-reference to avoid a recursive/oversized payload
        del position_dict['sim_account']
        positions.append(position_dict)
    self.latest_account = sim_account_schema.dump(account).data
    self.latest_account['positions'] = positions
    self.logger.info('on_trading_open:{},latest_account:{}'.format(
        timestamp, self.latest_account))
def record(self, entity, start, end, size, timestamps):
    """Record stock-connect daily quota usage from joinquant's STK_ML_QUOTA,
    converting 亿-denominated amounts to absolute numbers."""
    q = query(finance.STK_ML_QUOTA).filter(
        finance.STK_ML_QUOTA.link_id == entity.code,
        finance.STK_ML_QUOTA.day >= to_time_str(start)).limit(2000)
    df = finance.run_query(q)
    # FIX: removed leftover debug `print(df)`
    json_results = []
    for item in df.to_dict(orient='records'):
        result = {
            'provider': self.provider,
            'timestamp': item['day'],
            'name': entity.name,
            'buy_amount': multiple_number(item['buy_amount'], 100000000),
            'buy_volume': item['buy_volume'],
            'sell_amount': multiple_number(item['sell_amount'], 100000000),
            'sell_volume': item['sell_volume'],
            'quota_daily': multiple_number(item['quota_daily'], 100000000),
            'quota_daily_balance': multiple_number(item['quota_daily_balance'], 100000000)
        }
        json_results.append(result)
    # a short page means the source is exhausted
    if len(json_results) < 100:
        self.one_shot = True
    return json_results
def load_data(self):
    """Load `self.data_df` via get_data — selecting by entity_ids when given,
    otherwise by codes — then normalize timestamps and notify listeners.
    """
    # kwargs shared by both selection modes
    # (FIX: deduplicated two near-identical get_data calls that only differed
    # in entity_ids= vs codes=)
    common = dict(data_schema=self.data_schema,
                  provider=self.provider,
                  columns=self.columns,
                  start_timestamp=self.start_timestamp,
                  end_timestamp=self.end_timestamp,
                  filters=self.filters,
                  order=self.order,
                  limit=self.limit,
                  level=self.level,
                  time_field=self.time_field,
                  index=self.time_field)
    if self.entity_ids:
        self.data_df = get_data(entity_ids=self.entity_ids, **common)
    else:
        self.data_df = get_data(codes=self.codes, **common)
    if self.trip_timestamp:
        if self.level == IntervalLevel.LEVEL_1DAY:
            # strip intraday time components so daily rows align on pure dates
            self.data_df[self.time_field] = self.data_df[self.time_field].apply(
                lambda x: to_pd_timestamp(to_time_str(x)))
    if df_is_not_null(self.data_df):
        self.normal_data = NormalData(df=self.data_df,
                                      category_field=self.category_field,
                                      index_field=self.time_field,
                                      is_timeseries=True)
        self.data_df = self.normal_data.data_df
    for listener in self.data_listeners:
        listener.on_data_loaded(self.data_df)
def record(self, entity, start, end, size, timestamps):
    """Download netease CSV kdata for a stock or index and return record dicts."""
    start = to_time_str(start, fmt=TIME_FORMAT_DAY1)
    end = now_time_str(fmt=TIME_FORMAT_DAY1)
    # netease encodes the exchange as a single digit prefix: 0 = SH, 1 = SZ
    if entity.exchange == 'sh':
        exchange_flag = 0
    else:
        exchange_flag = 1
    url = self.url.format(exchange_flag, entity.code, start, end)
    response = requests.get(url=url)
    # the CSV is GB2312-encoded with 'None' marking missing values
    df = read_csv(io.BytesIO(response.content), encoding='GB2312', na_values='None')
    if df is None:
        return []
    df['name'] = entity.name
    # index data (指数) has no turnover-rate column
    if entity.entity_type == 'index':
        df = df.loc[:,
             ['日期', 'name', '最低价', '开盘价', '收盘价', '最高价', '成交量', '成交金额', '涨跌幅']]
        df.columns = ['timestamp', 'name', 'low', 'open', 'close', 'high', 'volume',
                      'turnover', 'change_pct']
    # stock data (股票) additionally carries 换手率 (turnover rate)
    else:
        df = df.loc[:,
             ['日期', 'name', '最低价', '开盘价', '收盘价', '最高价', '成交量', '成交金额', '涨跌幅', '换手率']]
        df.columns = ['timestamp', 'name', 'low', 'open', 'close', 'high', 'volume',
                      'turnover', 'change_pct', 'turnover_rate']
    df['timestamp'] = pd.to_datetime(df['timestamp'])
    df['provider'] = 'netease'
    df['level'] = self.level.value
    return df.to_dict(orient='records')
def test_china_stock_reader():
    """DataReader smoke test (eastmoney entity provider): initial load over
    2019-01-01..2019-06-10, then move_on() through 2019-06-11..14."""
    data_reader = DataReader(codes=['002572', '000338'], data_schema=Stock1dKdata,
                             entity_schema=Stock, start_timestamp='2019-01-01',
                             end_timestamp='2019-06-10', entity_provider='eastmoney')
    # first index level of the MultiIndex is the entity id
    categories = data_reader.data_df.index.levels[0].to_list()
    df = data_reader.data_df
    assert 'stock_sz_002572' in categories
    assert 'stock_sz_000338' in categories
    # boundary dates of the initial load must be present
    assert ('stock_sz_002572', '2019-01-02') in df.index
    assert ('stock_sz_000338', '2019-01-02') in df.index
    assert ('stock_sz_002572', '2019-06-10') in df.index
    assert ('stock_sz_000338', '2019-06-10') in df.index
    for timestamp in iterate_timestamps(entity_type='stock', exchange='sz',
                                        level=IntervalLevel.LEVEL_1DAY,
                                        start_timestamp='2019-06-11',
                                        end_timestamp='2019-06-14'):
        data_reader.move_on(to_timestamp=timestamp)
        df = data_reader.data_df
        assert ('stock_sz_002572', timestamp) in df.index
        assert ('stock_sz_000338', to_time_str(timestamp)) in df.index
def record(self, entity, start, end, size, timestamps):
    """Record exchange-level margin-trading totals from joinquant's
    STK_MT_TOTAL table."""
    jq_code = code_map_jq.get(entity.code)
    q = query(finance.STK_MT_TOTAL).filter(
        finance.STK_MT_TOTAL.exchange_code == jq_code,
        finance.STK_MT_TOTAL.date >= to_time_str(start)).limit(2000)
    df = finance.run_query(q)
    # FIX: removed leftover debug `print(df)`
    json_results = []
    for item in df.to_dict(orient='records'):
        result = {
            'provider': self.provider,
            'timestamp': item['date'],
            'name': entity.name,
            'margin_value': item['fin_value'],
            'margin_buy': item['fin_buy_value'],
            'short_value': item['sec_value'],
            'short_volume': item['sec_sell_volume'],
            'total_value': item['fin_sec_value']
        }
        json_results.append(result)
    # a short page means the source is exhausted
    if len(json_results) < 100:
        self.one_shot = True
    return json_results
def generate_request_param(self, security_item, start, end, size, timestamp):
    """Assemble the kdata fetch parameters: item, day-formatted range, level."""
    start_str = to_time_str(start, fmt=TIME_FORMAT_DAY1)
    end_str = now_time_str(fmt=TIME_FORMAT_DAY1)
    return {
        'security_item': security_item,
        'start': start_str,
        'end': end_str,
        'level': self.level.value
    }
def test_000778_cash_flow_statement():
    """Regression test: cash-flow statements of 000778 from eastmoney match the
    known set of report dates and the latest (2018-09-30) reported figures."""
    correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30',
                          '2017-06-30', '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30',
                          '2016-03-31', '2015-12-31', '2015-09-30', '2015-06-30', '2015-03-31',
                          '2014-12-31', '2014-09-30', '2014-06-30', '2014-03-31', '2013-12-31',
                          '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', '2012-09-30',
                          '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30',
                          '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31',
                          '2009-12-31', '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31',
                          '2008-09-30', '2008-06-30', '2008-03-31', '2007-12-31', '2007-09-30',
                          '2007-06-30', '2007-03-31', '2006-12-31', '2006-09-30', '2006-06-30',
                          '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', '2005-03-31',
                          '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31',
                          '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-06-30',
                          '2001-12-31', '2001-06-30', '2000-12-31', '2000-06-30', '1999-12-31',
                          '1998-12-31', '1998-06-30']
    result = get_cash_flow_statement(session=session, provider='eastmoney',
                                     return_type='domain', codes=['000778'],
                                     end_timestamp='2018-12-30',
                                     order=CashFlowStatement.report_date.desc(),
                                     time_field='report_date')
    # exact report-date set: nothing missing, nothing extra
    assert len(correct_timestamps) == len(result)
    timestamps = [to_time_str(item.report_date) for item in result]
    assert set(correct_timestamps) == set(timestamps)
    # spot-check every field of the most recent statement
    latest: CashFlowStatement = result[0]
    # operating activities
    assert latest.cash_from_selling == 27784000000
    assert latest.tax_refund == 60700000
    assert latest.cash_from_other_op == 1463000000
    assert latest.total_op_cash_inflows == 29310000000
    assert latest.cash_to_goods_services == 21210000000
    assert latest.cash_to_employees == 1460000000
    assert latest.taxes_and_surcharges == 2016000000
    assert latest.cash_to_other_related_op == 573700000
    assert latest.total_op_cash_outflows == 25260000000
    assert latest.net_op_cash_flows == 4050000000
    # investing activities
    assert latest.cash_from_disposal_of_investments == 556500000
    assert latest.cash_from_returns_on_investments == 44180000
    assert latest.cash_from_disposal_fixed_intangible_assets == 457200
    assert latest.cash_from_disposal_subsidiaries == 1046000000
    assert latest.cash_from_other_investing == 553000000
    assert latest.total_investing_cash_inflows == 2201000000
    assert latest.cash_to_acquire_fixed_intangible_assets == 2521000000
    assert latest.cash_to_investments == 1808000000
    assert latest.total_investing_cash_outflows == 4329000000
    assert latest.net_investing_cash_flows == -2128000000
    # financing activities
    assert latest.cash_from_accepting_investment == 24500000
    assert latest.cash_from_subsidiaries_accepting_minority_interest == 24500000
    assert latest.cash_from_borrowings == 10080000000
    assert latest.cash_from_issuing_bonds == 997000000
    assert latest.cash_from_other_financing == 200000000
    assert latest.total_financing_cash_inflows == 11300000000
    assert latest.cash_to_repay_borrowings == 11940000000
    assert latest.cash_to_pay_interest_dividend == 892100000
    assert latest.cash_to_other_financing == 328500000
    assert latest.total_financing_cash_outflows == 13160000000
    assert latest.net_financing_cash_flows == -1862000000
    # totals
    assert latest.foreign_exchange_rate_effect == 21350000
    assert latest.net_cash_increase == 81240000
    assert latest.cash_at_beginning == 5078000000
    assert latest.cash == 5159000000
def marshal_object_for_ui(object):
    """Convert a value into a UI-friendly primitive.

    Enums become their .value, pandas Timestamps become time strings,
    everything else passes through unchanged.
    (Parameter name shadows the builtin `object`; kept for interface compatibility.)
    """
    if isinstance(object, pd.Timestamp):
        return to_time_str(object)
    if isinstance(object, Enum):
        return object.value
    return object
def marshal_data_for_ui(cls, data):
    """Convert `data` into a UI-friendly primitive: Enum -> .value,
    pandas Timestamp -> time string, anything else unchanged."""
    if isinstance(data, pd.Timestamp):
        return to_time_str(data)
    if isinstance(data, Enum):
        return data.value
    return data
def china_stock_finished_timestamp(timestamp: pd.Timestamp, level: IntervalLevel):
    """Return True when `timestamp` is a finished-bar boundary for `level`
    in the China A-share trading session."""
    ts = to_pd_timestamp(timestamp)
    # a sub-second time can never be a bar boundary
    if ts.microsecond != 0:
        return False
    finished_times = china_stock_level_map_finished_timestamps.get(level.value)
    return to_time_str(ts, fmt=TIME_FORMAT_MINUTE1) in finished_times
def record(self, entity, start, end, size, timestamps):
    """Persist the trading-day calendar from `start` onwards."""
    df = pd.DataFrame()
    dates = get_trade_days(start_date=start)
    df['timestamp'] = pd.to_datetime(dates)
    # NOTE(review): id is just the bare date string — confirm it shouldn't be
    # prefixed with the entity id like other schemas' ids.
    df['id'] = [to_time_str(date) for date in dates]
    # the calendar is market-wide; it is anchored on one placeholder entity
    df['entity_id'] = 'stock_sz_000001'
    df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
             force_update=self.force_update)
def report_core_company():
    """Run the fundamental selector for today, push selected stocks into an
    eastmoney 'core' group, and email subscribers the result; retry on failure.
    """
    email_action = EmailInformer()
    # FIX: error_count must persist across retries. It used to be re-initialized
    # to 0 at the top of every `while` iteration, so the 10-failure alert below
    # could never fire.
    error_count = 0
    while True:
        try:
            # StockTradeDay.record_data(provider='joinquant')
            # Stock.record_data(provider='joinquant')
            # FinanceFactor.record_data(provider='eastmoney')
            # BalanceSheet.record_data(provider='eastmoney')
            target_date = to_time_str(now_pd_timestamp())
            my_selector: TargetSelector = FundamentalSelector(
                start_timestamp='2015-01-01', end_timestamp=target_date)
            my_selector.run()
            long_targets = my_selector.get_open_long_targets(
                timestamp=target_date)
            if long_targets:
                stocks = get_entities(provider='joinquant',
                                      entity_schema=Stock,
                                      entity_ids=long_targets,
                                      return_type='domain')
                # add them to eastmoney (best-effort: recreate the group)
                try:
                    try:
                        eastmoneypy.del_group('core')
                    except:
                        pass
                    eastmoneypy.create_group('core')
                    for stock in stocks:
                        eastmoneypy.add_to_group(stock.code, group_name='core')
                except Exception as e:
                    email_action.send_message(
                        "*****@*****.**", f'report_core_company error',
                        'report_core_company error:{}'.format(e))
                info = [f'{stock.name}({stock.code})' for stock in stocks]
                msg = ' '.join(info)
            else:
                msg = 'no targets'
            logger.info(msg)
            email_action.send_message(get_subscriber_emails(),
                                      f'{to_time_str(target_date)} 核心资产选股结果', msg)
            break
        except Exception as e:
            logger.exception('report_core_company error:{}'.format(e))
            # back off before retrying
            time.sleep(60 * 3)
            error_count = error_count + 1
            if error_count == 10:
                email_action.send_message(
                    "*****@*****.**", f'report_core_company error',
                    'report_core_company error:{}'.format(e))
def test_000001_finance_factor():
    """Regression test: finance factors of 000001 from eastmoney match the
    known set of report dates and the latest (2018-09-30) reported values."""
    correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30',
                          '2017-06-30', '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30',
                          '2016-03-31', '2015-12-31', '2015-09-30', '2015-06-30', '2015-03-31',
                          '2014-12-31', '2014-09-30', '2014-06-30', '2014-03-31', '2013-12-31',
                          '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', '2012-09-30',
                          '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30',
                          '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31',
                          '2009-12-31', '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31',
                          '2008-09-30', '2008-06-30', '2008-03-31', '2007-12-31', '2007-09-30',
                          '2007-06-30', '2007-03-31', '2006-12-31', '2006-09-30', '2006-06-30',
                          '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', '2005-03-31',
                          '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31',
                          '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30',
                          '2002-06-30', '2002-03-31', '2001-12-31', '2001-09-30', '2001-06-30',
                          '2001-03-31', '2000-12-31', '2000-06-30', '1999-12-31', '1999-06-30',
                          '1998-12-31', '1998-06-30', '1997-12-31', '1997-06-30', '1996-12-31',
                          '1996-06-30', '1995-12-31', '1995-06-30', '1994-12-31', '1994-06-30',
                          '1993-12-31', '1993-06-30', '1992-12-31', '1991-12-31', '1990-12-31',
                          '1989-12-31']
    result = get_finance_factor(session=session, provider='eastmoney',
                                return_type='domain', codes=['000001'],
                                end_timestamp='2018-12-30',
                                order=FinanceFactor.report_date.desc(),
                                time_field='report_date')
    # exact report-date set: nothing missing, nothing extra
    assert len(correct_timestamps) == len(result)
    timestamps = [to_time_str(item.report_date) for item in result]
    assert set(correct_timestamps) == set(timestamps)
    # spot-check every field of the most recent report
    latest: FinanceFactor = result[0]
    # per-share figures
    assert latest.basic_eps == 1.14
    assert latest.deducted_eps == 1.13
    assert latest.diluted_eps == 1.14
    assert latest.bps == 12.538
    assert latest.capital_reserve_ps == 3.2886
    assert latest.undistributed_profit_ps == 5.3566
    assert latest.op_cash_flow_ps == -0.6587
    # income and growth
    assert latest.total_op_income == 86660000000
    assert latest.net_profit == 20460000000
    assert latest.deducted_net_profit == 20350000000
    assert latest.op_income_growth_yoy == 0.0856
    assert latest.net_profit_growth_yoy == 0.068
    assert latest.deducted_net_profit_growth_yoy == 0.0636
    assert latest.op_income_growth_qoq == 0.0336
    assert latest.net_profit_growth_qoq == 0.0202
    assert latest.deducted_net_profit_growth_qoq == 0.0168
    # profitability and leverage
    assert latest.roe == 0.0948
    assert latest.deducted_roe == 0.0943
    assert latest.rota == 0.0062
    assert latest.net_margin == 0.2360
    assert latest.debt_asset_ratio == 0.9298
    assert latest.em == 14.25
    assert latest.equity_ratio == 13.25
    # bank-specific (fi_) metrics
    assert latest.fi_total_deposit == 2130000000000
    assert latest.fi_total_loan == 1920000000000
    assert latest.fi_loan_deposit_ratio == 0.9004
    assert latest.fi_npl_ratio == 0.0168
    assert latest.fi_npl_provision_coverage == 1.6914
def test_000778_income_statement():
    """Regression test: income statements of 000778 from eastmoney match the
    known set of report dates and the latest (2018-09-30) reported figures."""
    correct_timestamps = ['2018-09-30', '2018-06-30', '2018-03-31', '2017-12-31', '2017-09-30',
                          '2017-06-30', '2017-03-31', '2016-12-31', '2016-09-30', '2016-06-30',
                          '2016-03-31', '2015-12-31', '2015-09-30', '2015-06-30', '2015-03-31',
                          '2014-12-31', '2014-09-30', '2014-06-30', '2014-03-31', '2013-12-31',
                          '2013-09-30', '2013-06-30', '2013-03-31', '2012-12-31', '2012-09-30',
                          '2012-06-30', '2012-03-31', '2011-12-31', '2011-09-30', '2011-06-30',
                          '2011-03-31', '2010-12-31', '2010-09-30', '2010-06-30', '2010-03-31',
                          '2009-12-31', '2009-09-30', '2009-06-30', '2009-03-31', '2008-12-31',
                          '2008-09-30', '2008-06-30', '2008-03-31', '2007-12-31', '2007-09-30',
                          '2007-06-30', '2007-03-31', '2006-12-31', '2006-09-30', '2006-06-30',
                          '2006-03-31', '2005-12-31', '2005-09-30', '2005-06-30', '2005-03-31',
                          '2004-12-31', '2004-09-30', '2004-06-30', '2004-03-31', '2003-12-31',
                          '2003-09-30', '2003-06-30', '2003-03-31', '2002-12-31', '2002-09-30',
                          '2002-06-30', '2002-03-31', '2001-12-31', '2001-06-30', '2000-12-31',
                          '2000-06-30', '1999-12-31', '1999-06-30', '1998-12-31', '1998-06-30',
                          '1997-12-31', '1997-06-30', '1996-12-31', '1995-12-31', '1994-12-31']
    result = get_income_statement(session=session, provider='eastmoney',
                                  return_type='domain', codes=['000778'],
                                  end_timestamp='2018-12-30',
                                  order=IncomeStatement.report_date.desc(),
                                  time_field='report_date')
    # exact report-date set: nothing missing, nothing extra
    assert len(correct_timestamps) == len(result)
    timestamps = [to_time_str(item.report_date) for item in result]
    assert set(correct_timestamps) == set(timestamps)
    # spot-check every field of the most recent statement
    latest: IncomeStatement = result[0]
    # revenue and cost lines
    assert latest.operating_income == 31710000000
    assert latest.total_operating_costs == 29230000000
    assert latest.operating_costs == 26220000000
    assert latest.rd_costs == 185500000
    assert latest.net_change_in_insurance_contract_reserves == 0
    assert latest.business_taxes_and_surcharges == 359700000
    assert latest.sales_costs == 771400000
    assert latest.managing_costs == 472900000
    assert latest.financing_costs == 397500000
    assert latest.assets_devaluation == 824400000
    assert latest.investment_income == 104100000
    assert latest.investment_income_from_related_enterprise == 61290000
    # profit lines
    assert latest.operating_profit == 2637000000
    assert latest.non_operating_income == 38340000
    assert latest.non_operating_costs == 221700000
    assert latest.total_profits == 2454000000
    assert latest.tax_expense == 579600000
    assert latest.net_profit == 1874000000
    assert latest.net_profit_as_parent == 1811000000
    assert latest.net_profit_as_minority_interest == 63570000
    assert latest.deducted_net_profit == 1897000000
    assert latest.eps == 0.4537
    assert latest.diluted_eps == 0.4537
    # comprehensive income
    assert latest.other_comprehensive_income == -521000000
    assert latest.other_comprehensive_income_as_parent == -522400000
    assert latest.other_comprehensive_income_as_minority_interest == 1403000
    assert latest.total_comprehensive_income == 1353000000
    assert latest.total_comprehensive_income_as_parent == 1288000000
    assert latest.total_comprehensive_income_as_minority_interest == 64980000