def day_analysis_dict_pipe():
    """Run the day-level analysis for every stock in ``dict_stk_list``.

    Intended to run in the data-processing thread.

    :return: list of analysis-result strings collected from the index-level
             and day-level picture generators.
    """
    result_analysis_list = []

    jq_login()
    try:
        for tab in dict_stk_list.keys():
            for stk_info in dict_stk_list[tab]:
                stk = stk_info[1]
                df = get_k_data_JQ(stk, 400)

                # Other indicators
                r_tuple_index_pic = gen_idx_pic_wx(df, stk_code=stk)
                result_analysis_list.extend(r_tuple_index_pic[1])

                # Aggregate the day-level analysis results
                r_tuple_day_pic = gen_day_pic_wx(df, stk_code=stk)
                result_analysis_list.extend(r_tuple_day_pic[1])
    finally:
        # Always release the jqdata session, even if an analysis step raises.
        jq.logout()

    # NOTE(review): the 'hour_analysis' tag looks copy-pasted from the hourly
    # pipeline — confirm whether it should be 'day_analysis'.
    debug_print_txt('hour_analysis', 'total_stk', str(result_analysis_list) + '\n')

    return result_analysis_list
def log_out(self):
    """Log out of the jqdata session if this client is currently logged in.

    :return: None
    """
    if not self.log_status:
        return
    jq.logout()
def run(self):
    """Fetch the jqdata fund universe and persist it as ``FundDetail`` rows."""
    # Pull the full fund list from jqdata's finance tables.
    df = finance.run_query(query(finance.FUND_MAIN_INFO))
    df.index.name = 'entity_id'
    df = df.reset_index()

    # Listing date: jqdata's start_date becomes our timestamp/list_date.
    df.rename(columns={'start_date': 'timestamp'}, inplace=True)
    df['timestamp'] = pd.to_datetime(df['timestamp'])
    df['list_date'] = df['timestamp']
    df['end_date'] = pd.to_datetime(df['end_date'])

    # Build the zvt entity id from the jq fund code.
    df['entity_id'] = df.main_code.apply(lambda x: normalize_code(x))
    df['entity_id'] = df['entity_id'].apply(
        lambda x: to_entity_id(entity_type='fund', jq_code=x))
    df['id'] = df['entity_id']
    df['entity_type'] = 'fund'
    df['exchange'] = df['entity_id'].apply(lambda x: get_entity_exchange(x))
    df['code'] = df['entity_id'].apply(lambda x: get_entity_code(x))
    df['category'] = 'fund'

    df_to_db(df, data_schema=FundDetail, provider=self.provider,
             force_update=self.force_update)
    # Fixed copy-paste: this recorder persists the fund list, not the etf list.
    self.logger.info("persist fund list success")
    logout()
def close(self):
    """Best-effort cleanup: close the DB cursor and connection, then log out
    of jqdata.

    Each step is attempted independently so a failure in one (e.g. the cursor)
    no longer skips the remaining releases, which previously leaked the
    connection and the jqdata session.
    """
    for release in (lambda: self.cur.close(),
                    lambda: self.conn.close(),
                    lambda: jq.logout()):
        try:
            release()
        except Exception:
            # Deliberate best-effort: cleanup errors are ignored.
            pass
def run(self):
    """Download the jqdata fund universe and write it to the FundDetail table."""
    funds = finance.run_query(query(finance.FUND_MAIN_INFO))
    funds.index.name = 'entity_id'
    funds = funds.reset_index()

    # Listing date: start_date becomes the record timestamp.
    funds = funds.rename(columns={'start_date': 'timestamp'})
    funds['timestamp'] = pd.to_datetime(funds['timestamp'])
    funds['list_date'] = funds['timestamp']
    funds['end_date'] = pd.to_datetime(funds['end_date'])

    # Derive the zvt entity id from the fund's main code.
    funds['entity_id'] = funds.main_code.apply(normalize_code)
    funds['entity_id'] = funds['entity_id'].apply(
        lambda jq_code: to_entity_id(entity_type='fund', jq_code=jq_code))
    funds['id'] = funds['entity_id']
    funds['entity_type'] = 'fund'
    funds['exchange'] = funds['entity_id'].apply(get_entity_exchange)
    funds['code'] = funds['entity_id'].apply(get_entity_code)
    funds['category'] = 'fund'

    df_to_db(funds,
             data_schema=FundDetail,
             provider=self.provider,
             force_update=self.force_update)
    self.logger.info("persist etf list success")
    logout()
def run(self):
    """Fetch the etf list from jqdata and persist it to the Etf schema."""
    etf_entities = self.to_zvt_entity(get_all_securities(['etf']),
                                      entity_type='etf',
                                      category='etf')
    df_to_db(etf_entities,
             data_schema=Etf,
             provider=self.provider,
             force_update=self.force_update)
    self.logger.info("persist etf list success")
    logout()
def run(self):
    """Fetch the stock list from jqdata and persist it to both the Stock
    and StockDetail schemas."""
    stocks = self.to_zvt_entity(get_all_securities(['stock']),
                                entity_type='stock')
    # Same frame is written to both schemas.
    for schema in (Stock, StockDetail):
        df_to_db(df=stocks,
                 data_schema=schema,
                 provider=self.provider,
                 force_update=self.force_update)
    self.logger.info("persist stock list success")
    logout()
def update_rsv_record(self):
    """Recompute the RSV rank for every configured stock (buy / concerned /
    index lists), reporting failures to the UI text control and debug log."""
    jq_login()
    try:
        cfg = read_config()
        # De-duplicate codes across the three configured lists.
        codes = set(cfg['buy_stk'] + cfg['concerned_stk'] + cfg['index_stk'])
        # global RSV_Record
        for stk in codes:
            RSV_Record[stk] = cal_rsv_rank(stk, 5)
    except Exception as e:
        failure_msg = 'RSV数据更新失败!原因:\n' + str(e) + '\n'
        print(str(e))
        self.p_ctrl.m_textCtrlMsg.AppendText(failure_msg)
        debug_print_txt('main_log', '', failure_msg)
    finally:
        jq.logout()
# NOTE(review): this chunk begins mid-statement — the opening of the first
# ``to_sql`` call (and its enclosing function def) lies outside this view, so
# the leading indentation below is reconstructed, not original.
                chunksize=1000)
    res2.to_sql(name='t_valuation_2', con=mysqlconnect, schema='jqdata',
                if_exists='append', index=False, chunksize=1000)
    res3.to_sql(name='t_valuation_3', con=mysqlconnect, schema='jqdata',
                if_exists='append', index=False, chunksize=1000)
    res4.to_sql(name='t_valuation_4', con=mysqlconnect, schema='jqdata',
                if_exists='append', index=False, chunksize=1000)
    print("all " + day + "valuation data saved in t_valuation_[0,4] success")
    return


# SECURITY(review): hard-coded jqdata credentials — move to config/env and
# rotate; never commit secrets.
jq.auth('18620668927', 'minpeng123')

# If a non-trading day is passed in, data for the most recent prior trading
# day will be fetched, but the delete of historical DB rows uses the passed-in
# date, which makes the subsequent insert fail.
# Use the trading-calendar table and loop over trading days to backfill history.
get_oneday_valuation('2019-07-05')
jq.logout()
def gen_kind_pic(kind, pool):
    """Generate chart images and save them locally.

    :param kind: 'h': hourly; 'h_idx': hourly index; 'd': daily;
                 'wm': weekly/monthly; 'd_idx': daily index
    :param pool: multiprocessing pool used to render the pictures
                 asynchronously
    :return: nested dict — first level is the page ('Index'/'Buy'/'Concerned'),
             second level holds '<stk>_d' (data), '<stk>_res' (async render
             result) and '<stk>_url' (saved picture path) entries, so callers
             can locate each picture by page and row.
    """
    r_dic = {'Index': {}, 'Buy': {}, 'Concerned': {}}
    dict_stk_hour = copy.deepcopy(dict_stk_list)

    # Download all required data up front so the pool workers never have to
    # open their own jqdata connections.
    jq_login()
    try:
        for page in dict_stk_hour.keys():
            for stk_info in dict_stk_list[page]:
                stk = stk_info[1]
                # Fixed: string comparison was 'kind is ...' — identity on
                # str literals is implementation-dependent; use '=='.
                if kind == 'h':
                    r_dic[page][stk + '_d'] = GenPic.gen_hour_macd_values(stk)
                elif kind == 'h_idx':
                    r_dic[page][stk + '_d'] = GenPic.gen_hour_macd_values(stk)[0]
                elif kind == 'd':
                    r_dic[page][stk + '_d'] = get_k_data_JQ(stk, 400)
                elif kind == 'wm':
                    r_dic[page][stk + '_d'] = get_k_data_JQ(
                        stk, count=400).reset_index()
                elif kind == 'd_idx':
                    r_dic[page][stk + '_d'] = get_k_data_JQ(stk, 400)
    finally:
        # Always release the jqdata session, even if a download fails.
        jq.logout()

    # Map each kind to its renderer once, instead of re-testing per stock.
    renderers = {
        'h': GenPic.gen_hour_macd_pic_local,
        'h_idx': GenPic.gen_hour_index_pic_local,
        'd': GenPic.gen_day_pic_local,
        'wm': GenPic.gen_w_m_macd_pic_local,
        'd_idx': GenPic.gen_idx_pic_local,
    }

    # Render the pictures asynchronously in the pool.
    for page in dict_stk_hour.keys():
        for stk_info in dict_stk_list[page]:
            stk = stk_info[1]

            # Target path: <hist_pic_dir>/<date>/<stk><kind>/<datetime>.png
            save_dir = hist_pic_dir + get_current_date_str(
            ) + '/' + stk + kind + '/'
            file_name = get_current_datetime_str()[:-3].replace(
                ':', '').replace(' ', '').replace('-', '') + '.png'
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)

            if kind in renderers:
                r_dic[page][stk + '_res'] = pool.apply_async(
                    renderers[kind],
                    (r_dic[page][stk + '_d'], stk, save_dir + file_name))

            # Record the saved picture path for callers.
            r_dic[page][stk + '_url'] = save_dir + file_name

    return r_dic
def on_finish(self):
    """Hook run after recording completes: release the jqdata session."""
    logout()
def run(self):
    """Record detailed company/IPO meta for each stock in ``self.entities``
    and persist one row per stock via ``df_to_db``."""
    # NOTE(review): indentation reconstructed from a whitespace-collapsed
    # source — logout() is assumed to run once after the loop; confirm.
    for security_item in self.entities:
        assert isinstance(security_item, StockDetail)
        security = to_jq_entity_id(security_item)

        # Basic company information
        df = finance.run_query(
            query(finance.STK_COMPANY_INFO).filter(
                finance.STK_COMPANY_INFO.code == security))
        if df.empty:
            continue
        concept_dict = get_concept(security, date=security_item.timestamp)

        security_df = pd.DataFrame(index=[0])
        security_df['profile'] = df.description.values[0]
        security_df['main_business'] = df.main_business.values[0]
        security_df['date_of_establishment'] = to_pd_timestamp(
            df.establish_date.values[0])
        security_df['register_capital'] = df.register_capital.values[0]
        # Related industries, joined with the Chinese enumeration comma.
        security_df['industries'] = ''
        security_df['industries'] = security_df['industries'].apply(
            lambda x: '、'.join(df[['industry_1', 'industry_2']].values.
                               tolist()[0]))
        # Related concepts.  NOTE(review): applied over the 'industries'
        # column but the lambda ignores its argument — works, though the
        # source column choice looks accidental.
        security_df['concept_indices'] = ''
        security_df['concept_indices'] = security_df['industries'].apply(
            lambda x: '、'.join([
                i['concept_name']
                for i in concept_dict[security]['jq_concept']
            ]))
        # Related region.
        security_df['area_indices'] = df.province.values[0]

        # IPO / listing data (may be absent for some stocks).
        df_stk = finance.run_query(
            query(finance.STK_LIST).filter(
                finance.STK_LIST.code == security))
        if not df_stk.empty:
            security_df['price'] = df_stk.book_price.values[0]
            security_df['issues'] = df_stk.ipo_shares.values[0]
            try:
                # TypeError can occur when either value is non-numeric/None.
                security_df['raising_fund'] = df_stk.ipo_shares.values[
                    0] * df_stk.book_price.values[0]
            except TypeError:
                pass

        # Copy identifying fields from the entity onto the record.
        security_df['timestamp'] = security_item.timestamp
        security_df['id'] = security_item.id
        security_df['entity_id'] = security_item.entity_id
        security_df['code'] = security_item.code
        security_df['entity_type'] = security_item.entity_type
        security_df['exchange'] = security_item.exchange
        security_df['name'] = security_item.name
        security_df['list_date'] = security_item.list_date
        security_df['end_date'] = security_item.end_date
        df_to_db(df=security_df,
                 data_schema=self.data_schema,
                 provider=self.provider,
                 force_update=True)

        # security_item.profile = df.description[0]
        # security_item.main_business = df.main_business.values[0]
        # security_item.date_of_establishment = to_pd_timestamp(df.establish_date.values[0])
        # security_item.register_capital = df.register_capital.values[0]
        # security_item.date_of_establishment = security_item.date_of_establishment.to_pydatetime()
        # security_item.date_of_establishment = security_item.date_of_establishment.strftime('%Y-%m-%d %H:%M:%S')
        # related industries (to_pd_timestamp)
        # security_item.industries = df[['industry_1', 'industry_2']].values.tolist()[0]
        # related concepts
        # security_item.concept_indices = [i['concept_name'] for i in concept_dict[security]['jq_concept']]
        # related region
        # security_item.area_indices = df.province.values[0]

        self.sleep()

        # IPO-related (older per-entity handling, kept for reference)
        # df_stk = finance.run_query(query(finance.STK_LIST).filter(finance.STK_LIST.code == security))
        # security_item.issue_pe = to_float("--")
        # security_item.price = to_float(str(df_stk.book_price.values[0]))
        # security_item.issues = to_float(str(df_stk.ipo_shares.values[0]))
        # security_item.raising_fund = to_float(str(df_stk.ipo_shares.values[0] * df_stk.book_price.values[0]))
        # security_item.net_winning_rate = pct_to_float("--")
        # self.session.commit()

        self.logger.info('finish recording stock meta for:{}'.format(
            security_item.code))
        self.sleep()
    logout()
def main():
    """Fetch all securities from jqdata inside an authenticated session."""
    # SECURITY(review): hard-coded credentials — move to env/config and rotate.
    jq.auth('18620668927', 'minpeng123')
    try:
        get_all_securities()
    finally:
        # Guarantee the session is released even if the fetch fails.
        jq.logout()
def logout(self):
    """Log out of jqdatasdk, silencing its console output.

    No-op when this client is not logged in.
    """
    if not self.is_logged_in:
        return
    with utils.NullPrinter():
        jq.logout()
def gen_stk_list_kind_pic(self, kind, pool):
    """Generate chart images for ``self.stk_list`` and save them locally.

    :param kind: 'h': hourly; 'h_idx': hourly index; 'd': daily;
                 'wm': weekly/monthly; 'd_idx': daily index
    :param pool: multiprocessing pool used to render pictures asynchronously
    :return: dict with '<stk>_d' (data), '<stk>_res' (async render result)
             and '<stk>_url' (saved picture path) entries per stock
    """
    jq_login()
    # Download all required data before dispatching to the pool so worker
    # processes never open their own jqdata connections.
    r_dic = {}
    try:
        for stk in self.stk_list:
            # Fixed: 'kind is ...' replaced by '==' — identity comparison on
            # str literals is implementation-dependent.
            if kind == 'h':
                r_dic[stk + '_d'] = GenPic.gen_hour_macd_values(stk)
            elif kind == 'h_idx':
                r_dic[stk + '_d'] = GenPic.gen_hour_macd_values(stk)[0]
            elif kind == 'd':
                r_dic[stk + '_d'] = get_k_data_JQ(stk, 400)
            elif kind == 'wm':
                r_dic[stk + '_d'] = get_k_data_JQ(
                    stk, count=400,
                    end_date=get_current_date_str()).reset_index()
            elif kind == 'd_idx':
                r_dic[stk + '_d'] = get_k_data_JQ(stk, 400)
    finally:
        # Always release the jqdata session, even if a download fails.
        logout()

    # Map each kind to its renderer once, instead of re-testing per stock.
    renderers = {
        'h': GenPic.gen_hour_macd_pic_local,
        'h_idx': GenPic.gen_hour_index_pic_local,
        'd': GenPic.gen_day_pic_local,
        'wm': GenPic.gen_w_m_macd_pic_local,
        'd_idx': GenPic.gen_idx_pic_local,
    }

    # Render the pictures asynchronously in the pool.
    for stk in self.stk_list:
        # Target path: <save_dir>/<date>/<stk><kind>/<datetime>.png
        save_dir = self.save_dir + get_current_date_str(
        ) + '/' + stk + kind + '/'
        file_name = get_current_datetime_str()[:-3].replace(
            ':', '').replace(' ', '').replace('-', '') + '.png'
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)

        try:
            if kind in renderers:
                r_dic[stk + '_res'] = pool.apply_async(
                    renderers[kind],
                    (r_dic[stk + '_d'], stk, save_dir + file_name))
        except Exception as e_:
            self.log = self.log + '函数 gen_stk_list_kind_pic:\n%s\n' % str(
                e_)
            print('函数 gen_stk_list_kind_pic:\n%s\n' % str(e_))

        # Record the saved picture path for callers.
        r_dic[stk + '_url'] = save_dir + file_name

    return r_dic
def main(b='0', e='0'):
    """Fetch price data inside an authenticated jqdata session.

    :param b: begin marker, passed through to ``get_all_price``
    :param e: end marker, passed through to ``get_all_price``
    """
    # SECURITY(review): credentials are placeholders — load real ones from
    # config/env; never hard-code.
    jq.auth('***', '***')
    try:
        get_all_price(b, e)
    finally:
        # Guarantee the session is released even if the download fails.
        jq.logout()
def on_stop(self):
    """Stop hook: run base-class teardown, then release the jqdata session."""
    super().on_stop()
    logout()
def on_finish(self):
    """Finish hook: run base-class teardown, then release the jqdata session."""
    super().on_finish()
    logout()
def _logout_jqdata(self):
    """Log out of the jqdatasdk session (module imported as ``jqd``)."""
    jqd.logout()