def test_jq_603220_kdata():
    df = technical.get_kdata(entity_id='stock_sh_603220', session=day_k_session, level=IntervalLevel.LEVEL_1DAY,
                             provider='joinquant')
    print(df)

    df = technical.get_kdata(entity_id='stock_sh_603220', session=day_1h_session, level=IntervalLevel.LEVEL_1HOUR,
                             provider='joinquant')
    print(df)
def draw_order_signals(trader_name, render='html'):
    df_account = get_account(trader_name=trader_name)
    start_timestamp = df_account['timestamp'][0]
    end_timestamp = df_account['timestamp'][-1]

    df_orders = get_orders(trader_name=trader_name)
    grouped = df_orders.groupby('security_id')

    page = Page()
    for security_id, order_df in grouped:
        kdata = get_kdata(security_id=security_id, provider='netease', start_timestamp=start_timestamp,
                          end_timestamp=end_timestamp)
        mark_points = order_df

        kline = draw_kline(df_list=[kdata], markpoints_list=[mark_points], render=None)
        page.add(kline)

    if render == 'html':
        file_name = '{}_signals'.format(trader_name)
        page.render(get_ui_path(file_name))
    elif render == 'notebook':
        page.render_notebook()

    return page
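# Hedged usage sketch for draw_order_signals above; 'my_trader' is a hypothetical
# trader name assumed to have account and order records persisted by a previous
# backtest run.
if __name__ == '__main__':
    page = draw_order_signals(trader_name='my_trader', render='html')
    # in a Jupyter notebook the chart can be rendered inline instead:
    # draw_order_signals(trader_name='my_trader', render='notebook')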
def __init__(self,
             entity_type='stock',
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=5,
             default_size=2000,
             one_shot=False,
             fix_duplicate_way='add',
             start_timestamp=None,
             end_timestamp=None,
             contain_unfinished_data=False,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60) -> None:
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level)
    self.jq_trading_level = to_jq_trading_level(level)

    super().__init__(entity_type, exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, one_shot, fix_duplicate_way, start_timestamp, end_timestamp,
                     contain_unfinished_data, level, kdata_use_begin_time, close_hour, close_minute,
                     one_day_trading_minutes)

    self.current_factors = {}
    for security_item in self.entities:
        kdata = get_kdata(entity_id=security_item.id, provider=self.provider, level=self.level.value,
                          order=self.data_schema.timestamp.desc(), limit=1, return_type='domain',
                          session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
            self.logger.info('{} latest factor:{}'.format(security_item.id, kdata[0].factor))

    auth(JQ_ACCOUNT, JQ_PASSWD)
def on_finish(self, security_item):
    kdatas = get_kdata(security_id=security_item.id, data_schema=ETF1DKdata, level=TradingLevel.LEVEL_1DAY.value,
                       order=ETF1DKdata.timestamp.asc(), return_type='domain', session=self.session,
                       filters=[ETF1DKdata.cumulative_net_value.is_(None)])

    if kdatas and len(kdatas) > 0:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # fetch the fund's cumulative net value from eastmoney
        df = self.fetch_cumulative_net_value(security_item, start, end)

        if df is not None and not df.empty:
            for kdata in kdatas:
                if kdata.timestamp in df.index:
                    kdata.cumulative_net_value = df.loc[kdata.timestamp, 'LJJZ']
                    kdata.change_pct = df.loc[kdata.timestamp, 'JZZZL']
            self.session.commit()

        self.logger.info(f'{security_item.code} - {security_item.name} cumulative net value updated')
def __init__(self) -> None:
    if self.start_timestamp:
        self.start_timestamp = to_pd_timestamp(self.start_timestamp)
        self.start_timestamp = self.trading_level.floor_timestamp(self.start_timestamp)
        self.current_timestamp = self.start_timestamp
    else:
        self.start_timestamp = now_pd_timestamp()

    if self.end_timestamp:
        self.end_timestamp = to_pd_timestamp(self.end_timestamp)

    self.security_type, self.exchange, self.code = decode_security_id(self.security_id)
    self.kdata_schema = get_kdata_schema(self.security_type)

    # init history data
    for model in self.models:
        datas = get_kdata(self.security_id, level=model.trading_level, end_timestamp=self.start_timestamp,
                          order=self.kdata_schema.timestamp.desc(), limit=model.history_size)
        if datas:
            model.init_history_data(datas)

        if not datas:
            self.logger.warning(
                "to {}, {} no history data ".format(self.start_timestamp, self.security_id))
        elif len(datas) < self.history_data_size:
            self.logger.warning(
                "to {}, {} history data size:{}".format(self.start_timestamp, self.security_id, len(datas)))
def __init__(self,
             security_type=SecurityType.stock,
             exchanges=['sh', 'sz'],
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=5,
             fetching_style=TimeSeriesFetchingStyle.end_size,
             default_size=2000,
             contain_unfinished_data=False,
             level=TradingLevel.LEVEL_1DAY,
             one_shot=True) -> None:
    super().__init__(security_type, exchanges, codes, batch_size, force_update, sleeping_time, fetching_style,
                     default_size, contain_unfinished_data, level, one_shot)

    self.current_factors = {}
    self.latest_factors = {}
    for security_item in self.securities:
        kdata = get_kdata(security_id=security_item.id, provider=self.provider, level=self.level.value,
                          order=StockDayKdata.timestamp.desc(), return_type='domain', session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
def __init__(self,
             entity_type='stock',
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=10,
             default_size=2000,
             one_shot=True,
             fix_duplicate_way='add',
             start_timestamp=None,
             end_timestamp=None,
             contain_unfinished_data=False,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=0,
             close_minute=0,
             one_day_trading_minutes=24 * 60) -> None:
    super().__init__(entity_type, exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, one_shot, fix_duplicate_way, start_timestamp, end_timestamp,
                     contain_unfinished_data, level, kdata_use_begin_time, close_hour, close_minute,
                     one_day_trading_minutes)

    self.current_factors = {}
    self.latest_factors = {}
    for security_item in self.entities:
        kdata = get_kdata(entity_id=security_item.id, provider=self.provider, level=self.level.value,
                          order=Stock1dKdata.timestamp.desc(), return_type='domain', session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
def on_trading_close(self, timestamp):
    self.logger.info('on_trading_close:{}'.format(timestamp))
    self.latest_account['value'] = 0
    self.latest_account['all_value'] = 0
    for position in self.latest_account['positions']:
        entity_type, _, _ = decode_entity_id(position['entity_id'])
        data_schema = get_kdata_schema(entity_type, level=self.level)

        kdata = get_kdata(provider=self.provider, level=self.level, entity_id=position['entity_id'],
                          order=data_schema.timestamp.desc(), end_timestamp=timestamp, limit=1)

        # use qfq for stock
        if entity_type == 'stock':
            closing_price = kdata['qfq_close'][0]
        else:
            closing_price = kdata['close'][0]

        position['available_long'] = position['long_amount']
        position['available_short'] = position['short_amount']

        if closing_price:
            if (position['long_amount'] is not None) and position['long_amount'] > 0:
                position['value'] = position['long_amount'] * closing_price
                self.latest_account['value'] += position['value']
            elif (position['short_amount'] is not None) and position['short_amount'] > 0:
                position['value'] = 2 * (position['short_amount'] * position['average_short_price'])
                position['value'] -= position['short_amount'] * closing_price
                self.latest_account['value'] += position['value']
        else:
            self.logger.warning(
                'could not refresh close value for position:{},timestamp:{}'.format(position['entity_id'],
                                                                                    timestamp))

    # remove the empty positions
    self.latest_account['positions'] = [position for position in self.latest_account['positions']
                                        if position['long_amount'] > 0 or position['short_amount'] > 0]

    self.latest_account['all_value'] = self.latest_account['value'] + self.latest_account['cash']
    self.latest_account['closing'] = True
    self.latest_account['timestamp'] = to_pd_timestamp(timestamp)

    self.logger.info('on_trading_close:{},latest_account:{}'.format(timestamp, self.latest_account))
    self.persist_account(timestamp)
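# Hedged sketch of the position valuation used in on_trading_close above: a long
# position is marked to the closing price, while a short position is valued as
# twice the opening proceeds minus the cost of buying it back at the close.
# The helper and numbers below are illustrative only, not part of the library.
def _position_value_sketch(long_amount, short_amount, average_short_price, closing_price):
    if long_amount and long_amount > 0:
        return long_amount * closing_price
    if short_amount and short_amount > 0:
        return 2 * (short_amount * average_short_price) - short_amount * closing_price
    return 0


if __name__ == '__main__':
    # hypothetical numbers: short 100 shares at 10; the price falls to 9,
    # so the position value rises from 1000 to 1100
    assert _position_value_sketch(0, 100, 10, 9) == 1100
    # a long 100 shares marked at the 9 close is simply worth 900
    assert _position_value_sketch(100, 0, 0, 9) == 900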
def on_finish(self, security_item):
    kdatas = get_kdata(provider=self.provider, security_id=security_item.id, level=self.level.value,
                       order=self.data_schema.timestamp.asc(), return_type='domain', session=self.session,
                       filters=[self.data_schema.hfq_close.is_(None),
                                self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')])
    if kdatas:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # get hfq data from joinquant
        df = get_price(to_jq_security_id(security_item), start_date=to_time_str(start), end_date=now_time_str(),
                       frequency='daily', fields=['factor', 'open', 'close', 'low', 'high'], skip_paused=True,
                       fq='post')
        if df is not None and not df.empty:
            # fill hfq data
            for kdata in kdatas:
                time_str = to_time_str(kdata.timestamp)
                if time_str in df.index:
                    kdata.hfq_open = df.loc[time_str, 'open']
                    kdata.hfq_close = df.loc[time_str, 'close']
                    kdata.hfq_high = df.loc[time_str, 'high']
                    kdata.hfq_low = df.loc[time_str, 'low']
                    kdata.factor = df.loc[time_str, 'factor']
            self.session.commit()

            latest_factor = df.factor[-1]
            if latest_factor == self.current_factors.get(security_item.id):
                # factor has not changed, only fill the rows whose qfq values are still missing
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'security_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    self.data_schema.__table__, latest_factor, latest_factor, latest_factor, latest_factor,
                    security_item.id, self.level.value)
            else:
                # factor changed, recompute qfq for the whole history
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'security_id=\'{}\' and level=\'{}\''.format(self.data_schema.__table__, latest_factor,
                                                                   latest_factor, latest_factor, latest_factor,
                                                                   security_item.id, self.level.value)
            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()

    # TODO: use netease provider to get turnover_rate
    self.logger.info('use netease provider to get turnover_rate')
def on_finish_entity(self, entity):
    kdatas = get_kdata(entity_id=entity.id, level=self.level.value, order=Stock1dKdata.timestamp.asc(),
                       return_type='domain', session=self.session,
                       filters=[Stock1dKdata.factor.is_(None),
                                Stock1dKdata.timestamp >= to_pd_timestamp('2005-01-01')])
    if kdatas:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # get hfq data from joinquant
        df = get_price(to_jq_entity_id(entity), start_date=to_time_str(start), end_date=now_time_str(),
                       frequency='daily', fields=['factor', 'open', 'close', 'low', 'high'], skip_paused=True,
                       fq='post')
        if df is not None and not df.empty:
            # fill hfq data
            for kdata in kdatas:
                if kdata.timestamp in df.index:
                    kdata.hfq_open = df.loc[kdata.timestamp, 'open']
                    kdata.hfq_close = df.loc[kdata.timestamp, 'close']
                    kdata.hfq_high = df.loc[kdata.timestamp, 'high']
                    kdata.hfq_low = df.loc[kdata.timestamp, 'low']
                    kdata.factor = df.loc[kdata.timestamp, 'factor']
            self.session.commit()

            latest_factor = df.factor[-1]
            if latest_factor == self.current_factors.get(entity.id):
                # factor has not changed, only fill the rows whose qfq values are still missing
                sql = 'UPDATE stock_1d_kdata SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    latest_factor, latest_factor, latest_factor, latest_factor, entity.id, self.level.value)
            else:
                # factor changed, recompute qfq for the whole history
                sql = 'UPDATE stock_1d_kdata SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\''.format(latest_factor, latest_factor, latest_factor,
                                                                 latest_factor, entity.id, self.level.value)
            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()
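# Hedged sketch of the qfq/hfq relationship applied by the UPDATE statements above:
# forward-adjusted (qfq) price = backward-adjusted (hfq) price divided by the latest
# adjustment factor. The DataFrame and numbers here are illustrative only.
import pandas as pd

if __name__ == '__main__':
    hfq_df = pd.DataFrame({'hfq_open': [99.0, 100.5, 101.5],
                           'hfq_close': [100.0, 102.0, 101.0]})
    latest_factor = 2.0

    hfq_df['qfq_open'] = hfq_df['hfq_open'] / latest_factor
    hfq_df['qfq_close'] = hfq_df['hfq_close'] / latest_factor
    print(hfq_df)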
def on_trading_signal(self, trading_signal: TradingSignal):
    self.logger.info('trader:{} received trading signal:{}'.format(self.trader_name, trading_signal))
    security_id = trading_signal.security_id
    current_timestamp = trading_signal.the_timestamp
    order_type = AccountService.trading_signal_to_order_type(trading_signal.trading_signal_type)
    trading_level = trading_signal.trading_level.value
    if order_type:
        try:
            kdata = get_kdata(provider=self.provider, security_id=security_id, level=trading_level,
                              start_timestamp=current_timestamp, end_timestamp=current_timestamp, limit=1)

            if kdata is not None and not kdata.empty:
                # use qfq for stock
                security_type, _, _ = decode_security_id(kdata['security_id'][0])
                if security_type == SecurityType.stock:
                    the_price = kdata['qfq_close'][0]
                else:
                    the_price = kdata['close'][0]

                if the_price:
                    self.order(security_id=security_id, current_price=the_price,
                               current_timestamp=current_timestamp, order_pct=trading_signal.position_pct,
                               order_money=trading_signal.order_money, order_type=order_type)
                else:
                    self.logger.warning(
                        'ignore trading signal,wrong kdata,security_id:{},timestamp:{},kdata:{}'.format(
                            security_id, current_timestamp, kdata.to_dict(orient='records')))
            else:
                self.logger.warning(
                    'ignore trading signal,could not get kdata,security_id:{},timestamp:{}'.format(
                        security_id, current_timestamp))
        except Exception as e:
            self.logger.exception(e)
def ma(security_id, start_timestamp, end_timestamp, level=TradingLevel.LEVEL_1DAY, provider=Provider.EASTMONEY,
       window=5):
    df = technical.get_kdata(security_id, start_timestamp=start_timestamp, end_timestamp=end_timestamp,
                             provider=provider, level=level, columns=get_close_column(security_id))
    df = df.rename(columns={'qfq_close': 'close'})

    df['ma_{}'.format(window)] = df['close'].rolling(window=window, min_periods=window).mean()
    return df
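# Hedged usage sketch for ma() above; the security id and date range are illustrative
# and assume the corresponding kdata has already been recorded locally.
if __name__ == '__main__':
    df = ma('stock_sz_000338', start_timestamp='2019-01-01', end_timestamp='2019-05-25', window=10)
    # the frame carries the renamed 'close' column plus the rolling mean column
    print(df[['close', 'ma_10']])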
def on_next_period(self):
    for model in self.models:
        start_timestamp, end_timestamp = model.evaluate_fetch_interval(self.current_timestamp)
        if start_timestamp and end_timestamp:
            retry_times = 10
            while retry_times > 0:
                datas = get_kdata(self.security_id, level=model.trading_level.value,
                                  start_timestamp=start_timestamp, end_timestamp=end_timestamp)
                if not datas:
                    self.logger.warning(
                        "no kdata for security:{},trading_level:{},start_timestamp:{} end_timestamp:{} ".format(
                            self.security_id, model.trading_level, start_timestamp, end_timestamp))
                    retry_times = retry_times - 1
                    continue

                for data in datas:
                    series_data = pd.Series(data)
                    series_data.name = to_pd_timestamp(data['timestamp'])
                    model.append_data(series_data)

                break
def on_finish_entity(self, entity):
    kdatas = get_kdata(entity_id=entity.id, level=IntervalLevel.LEVEL_1DAY.value,
                       order=Index1dKdata.timestamp.asc(), return_type='domain', session=self.session,
                       filters=[Index1dKdata.cumulative_net_value.is_(None)])

    if kdatas and len(kdatas) > 0:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # fetch the fund's cumulative net value from eastmoney
        df = self.fetch_cumulative_net_value(entity, start, end)

        if df is not None and not df.empty:
            for kdata in kdatas:
                if kdata.timestamp in df.index:
                    kdata.cumulative_net_value = df.loc[kdata.timestamp, 'LJJZ']
                    kdata.change_pct = df.loc[kdata.timestamp, 'JZZZL']
            self.session.commit()

        self.logger.info(f'{entity.code} - {entity.name} cumulative net value updated')
def on_trading_signal(self, trading_signal: TradingSignal):
    security_id = trading_signal.security_id
    current_timestamp = trading_signal.the_timestamp
    order_type = AccountService.trading_signal_to_order_type(trading_signal.trading_signal_type)
    trading_level = trading_signal.trading_level
    if order_type:
        try:
            kdata = get_kdata(provider=self.provider, security_id=security_id, level=trading_level,
                              start_timestamp=current_timestamp, limit=1)

            if not kdata.empty and kdata['close'][0]:
                self.order(security_id=security_id, current_price=kdata['close'][0],
                           current_timestamp=current_timestamp, order_type=order_type,
                           order_pct=trading_signal.position_pct)
            else:
                self.logger.warning(
                    'could not get kdata,security_id:{},timestamp:{}'.format(security_id, current_timestamp))
        except Exception as e:
            self.logger.exception(e)
def __init__(self,
             security_type=SecurityType.stock,
             exchanges=['sh', 'sz'],
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=5,
             fetching_style=TimeSeriesFetchingStyle.end_size,
             default_size=2000,
             contain_unfinished_data=False,
             level=TradingLevel.LEVEL_1DAY,
             one_shot=True,
             start_timestamp=None) -> None:
    self.data_schema = get_kdata_schema(security_type=security_type, level=level)
    self.jq_trading_level = to_jq_trading_level(level)
    self.start_timestamp = to_pd_timestamp(start_timestamp)

    super().__init__(security_type, exchanges, codes, batch_size, force_update, sleeping_time, fetching_style,
                     default_size, contain_unfinished_data, level, one_shot)

    self.current_factors = {}
    for security_item in self.securities:
        kdata = get_kdata(security_id=security_item.id, provider=self.provider, level=self.level.value,
                          order=self.data_schema.timestamp.desc(), limit=1, return_type='domain',
                          session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
            self.logger.info('{} latest factor:{}'.format(security_item.id, kdata[0].factor))

    auth(JQ_ACCOUNT, JQ_PASSWD)
def save_closing_account(self, the_date):
    self.latest_account.value = 0
    self.latest_account.all_value = 0
    for position in self.latest_account.positions:
        kdata = get_kdata(provider=self.provider, security_id=position.security_id, end_timestamp=the_date,
                          limit=1)
        closing_price = kdata['close']

        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if position.long_amount > 0:
            position.value = position.long_amount * closing_price
            self.latest_account.value += position.value
        elif position.short_amount > 0:
            position.value = 2 * (position.short_amount * position.average_short_price)
            position.value -= position.short_amount * closing_price
            self.latest_account.value += position.value

    self.latest_account.all_value = self.latest_account.value + self.latest_account.cash
    self.latest_account.closing = True
    self.latest_account.timestamp = to_pd_timestamp(the_date)

    self.save_account(self.latest_account)
def calculate_closing_account(self, the_date):
    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        kdata = get_kdata(security_item=position['securityId'], the_date=the_date)
        closing_price = kdata['close']

        position.availableLong = position.longAmount
        position.availableShort = position.shortAmount

        if position.longAmount > 0:
            position.value = position.longAmount * closing_price
            self.account.value += position.value
        elif position.shortAmount > 0:
            position.value = 2 * (position.shortAmount * position.averageShortPrice)
            position.value -= position.shortAmount * closing_price
            self.account.value += position.value

    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = the_date

    self.account_to_queue()
def on_finish_entity(self, entity):
    kdatas = get_kdata(provider=self.provider, entity_id=entity.id, level=self.level.value,
                       order=self.data_schema.timestamp.asc(), return_type='domain', session=self.session,
                       filters=[self.data_schema.hfq_close.is_(None),
                                self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')])
    if kdatas:
        start = kdatas[0].timestamp
        end = kdatas[-1].timestamp

        # get hfq data from joinquant
        df = get_price(to_jq_entity_id(entity), start_date=to_time_str(start), end_date=now_time_str(),
                       frequency='daily', fields=['factor', 'open', 'close', 'low', 'high'], skip_paused=True,
                       fq='post')
        if df is not None and not df.empty:
            # fill hfq data
            for kdata in kdatas:
                time_str = to_time_str(kdata.timestamp)
                if time_str in df.index:
                    kdata.hfq_open = df.loc[time_str, 'open']
                    kdata.hfq_close = df.loc[time_str, 'close']
                    kdata.hfq_high = df.loc[time_str, 'high']
                    kdata.hfq_low = df.loc[time_str, 'low']
                    kdata.factor = df.loc[time_str, 'factor']
            self.session.commit()

            latest_factor = df.factor[-1]
            if latest_factor == self.current_factors.get(entity.id):
                # factor has not changed, only fill the rows whose qfq values are still missing
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    self.data_schema.__table__, latest_factor, latest_factor, latest_factor, latest_factor,
                    entity.id, self.level.value)
            else:
                # factor changed, recompute qfq for the whole history
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\''.format(self.data_schema.__table__, latest_factor,
                                                                 latest_factor, latest_factor, latest_factor,
                                                                 entity.id, self.level.value)
            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()

        # use netease provider to get turnover_rate
        query_url = 'http://quotes.money.163.com/service/chddata.html?code={}{}&start={}&end={}&fields=PCHG;TURNOVER'
        if entity.exchange == 'sh':
            exchange_flag = 0
        else:
            exchange_flag = 1
        url = query_url.format(exchange_flag, entity.code, to_time_str(start), to_time_str(end))
        response = requests.get(url=url)

        df = read_csv(io.BytesIO(response.content), encoding='GB2312', na_values='None')
        # '日期' is the date column of the netease csv
        df['日期'] = pd.to_datetime(df['日期'])
        df.set_index('日期', drop=True, inplace=True)

        if df is not None and not df.empty:
            # fill turnover_rate ('换手率') and pct change ('涨跌幅')
            for kdata in kdatas:
                if kdata.timestamp in df.index:
                    kdata.turnover_rate = df.loc[kdata.timestamp, '换手率']
                    kdata.change_pct = df.loc[kdata.timestamp, '涨跌幅']
            self.session.commit()
    :return:
    :rtype:
    """
    return s.rolling(window=window, min_periods=window).mean()


def ema(s, window=12):
    return s.ewm(span=window, adjust=False, min_periods=window).mean()


def macd(s, slow=26, fast=12, n=9):
    # DIFF = EMA(fast) - EMA(slow), DEA = EMA(DIFF, n), MACD bar = 2 * (DIFF - DEA)
    ema_fast = ema(s, window=fast)
    ema_slow = ema(s, window=slow)
    diff = ema_fast - ema_slow

    dea = diff.ewm(span=n, adjust=False).mean()
    m = (diff - dea) * 2
    return diff, dea, m


if __name__ == '__main__':
    kdata = get_kdata(entity_id='stock_sz_000338', start_timestamp='2019-01-01', end_timestamp='2019-05-25',
                      provider='netease')
    kdata['diff'], kdata['dea'], kdata['m'] = macd(kdata['qfq_close'])
    print(kdata)
color="#ec0000", color0="#00da3c", border_color="#8A0000", border_color0="#008F28", ), ) .set_global_opts( xaxis_opts=opts.AxisOpts(is_scale=True), yaxis_opts=opts.AxisOpts( is_scale=True, splitarea_opts=opts.SplitAreaOpts( is_show=True, areastyle_opts=opts.AreaStyleOpts(opacity=1) ), ), datazoom_opts=[opts.DataZoomOpts()], title_opts=opts.TitleOpts(title="Kline-ItemStyle"), ) ) if not kline: kline = current_kline else: kline.overlap(current_kline) return kline if __name__ == '__main__': kdata = get_kdata(security_id='stock_sz_300027', provider='netease') draw_kline([kdata])
            markpoint_opts=markpoint_opts,
            itemstyle_opts=opts.ItemStyleOpts(
                color="#ec0000",
                color0="#00da3c",
                border_color="#8A0000",
                border_color0="#008F28"))

        if not kline:
            kline = current_kline
        else:
            kline.overlap(current_kline)

    if render == 'html':
        kline.render(get_ui_path(file_name))
    elif render == 'notebook':
        kline.render_notebook()

    return kline


if __name__ == '__main__':
    kdata1 = get_kdata(security_id='stock_sz_000338', provider='netease')
    kdata2 = get_kdata(security_id='stock_sz_000778', provider='netease')
    df_list = fill_with_same_index([kdata1, kdata2])

    assert len(df_list[0]) == len(df_list[1])

    print(df_list[0])
    print(df_list[1])

    draw_kline(df_list, file_name='test_kline.html')