def __saving_work(code, coll):
    """Incrementally save minute-level bars for one ETF/index code.

    For each bar frequency, resumes from the last stored ``datetime`` in
    *coll* (or from 2015-01-01 when the code has no rows yet) and inserts
    the newly fetched rows. Failing codes are appended to the enclosing
    ``err`` list instead of raising.
    """
    QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== %s' % (str(code)))
    freqs = ['1min', '5min', '15min', '30min', '60min']
    try:
        for freq in freqs:  # renamed from `type`, which shadows the builtin
            ref_ = coll.find({'code': str(code)[0:6], 'type': freq})
            end_time = str(now_time())[0:19]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['datetime']
                # first fetched row duplicates the stored last bar -> skip it
                slice_from = 1
            else:
                start_time = '2015-01-01'
                slice_from = 0
            QA_util_log_info(
                '##JOB07.%s Now Saving %s from %s to %s ==%s ' %
                (freqs.index(freq), str(code), start_time, end_time, freq))
            if start_time != end_time:
                __data = QA_fetch_get_index_min(str(code), start_time,
                                                end_time, freq)
                if len(__data) > 1:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(__data[slice_from:]))
    except Exception:
        # collect the failing code; the caller reports the error list
        err.append(code)
def QA_SU_save_stock_transaction(client=DATABASE):
    """save stock_transaction

    Downloads tick-by-tick transactions for every listed stock, from each
    stock's listing date up to today, into ``client.stock_transaction``.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_time_to_market()
    coll = client.stock_transaction
    coll.create_index('code')
    err = []

    def __saving_work(code):
        # One code per call; failures are collected in `err`, not raised.
        QA_util_log_info(
            '##JOB10 Now Saving STOCK_TRANSACTION ==== %s' % (str(code)))
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    QA_fetch_get_stock_transaction(
                        str(code), str(stock_list[code]),
                        str(now_time())[0:10])))
        except Exception:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info('The %s of Total %s' % (i_, len(stock_list)))
        QA_util_log_info(
            'DOWNLOAD PROGRESS %s ' %
            str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        __saving_work(stock_list.index[i_])
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def __saving_work(code, coll):
    """Incrementally save daily bars for one ETF/index code into *coll*.

    Resumes from the last stored ``date`` (or 1990-01-01 for a new code);
    when resuming, the first fetched row overlaps the stored last bar and
    is skipped. Failing codes are appended to the enclosing ``err`` list.
    """
    try:
        ref_ = coll.find({'code': str(code)[0:6]})
        end_time = str(now_time())[0:10]
        if ref_.count() > 0:
            start_time = ref_[ref_.count() - 1]['date']
            slice_from = 1  # skip the overlapping first row
        else:
            start_time = '1990-01-01'
            slice_from = 0
        QA_util_log_info(
            '##JOB06 Now Saving ETF_DAY==== \n Trying updating %s from %s to %s'
            % (code, start_time, end_time))
        if start_time != end_time:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    QA_fetch_get_index_day(str(code), start_time,
                                           end_time)[slice_from:]))
    except Exception:
        err.append(str(code))
def QA_SU_save_etf_day(client=DATABASE):
    """save etf_day

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        # Resume from the last stored date (or 1990-01-01 for a new code);
        # when resuming, the first fetched row duplicates the stored bar.
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                start_time = ref_[ref_.count() - 1]['date']
                slice_from = 1
            else:
                start_time = '1990-01-01'
                slice_from = 0
            QA_util_log_info(
                '##JOB06 Now Saving ETF_DAY==== \n Trying updating %s from %s to %s'
                % (code, start_time, end_time))
            if start_time != end_time:
                coll.insert_many(
                    QA_util_to_json_from_pandas(
                        QA_fetch_get_index_day(str(code), start_time,
                                               end_time)[slice_from:]))
        except Exception:
            err.append(str(code))

    for i_ in range(len(__index_list)):
        QA_util_log_info('The %s of Total %s' % (i_, len(__index_list)))
        QA_util_log_info(
            'DOWNLOAD PROGRESS %s ' %
            str(float(i_ / len(__index_list) * 100))[0:4] + '%')
        # multi-index: element 0 of each index tuple is the code
        __saving_work(__index_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def select_best_ip():
    """Probe all known TDX servers and return the lowest-latency pair.

    Returns a dict ``{'stock': ..., 'future': ...}`` with the fastest
    entry from ``stock_ip_list`` and ``future_ip_list`` respectively.
    """
    QA_util_log_info('Selecting the Best Server IP of TDX')

    def _fastest(candidates, kind):
        # Measure latency of every candidate; pick the first minimum.
        latencies = [ping(candidate, kind) for candidate in candidates]
        best_pos = min(range(len(latencies)), key=latencies.__getitem__)
        return candidates[best_pos]

    best_stock_ip = _fastest(stock_ip_list, 'stock')
    best_future_ip = _fastest(future_ip_list, 'future')
    QA_util_log_info(
        '=== The BEST SERVER ===\n stock_ip {} future_ip {}'.format(
            best_stock_ip, best_future_ip))
    return {'stock': best_stock_ip, 'future': best_future_ip}
def QA_SU_save_stock_xdxr(client=DATABASE, PROGRESS_INFO=None):
    """Save ex-dividend/ex-rights (xdxr) info for every listed stock.

    Drops and rebuilds the ``stock_xdxr`` collection. Codes that fail on
    the first pass are retried once; remaining failures are logged.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        PROGRESS_INFO {dict} -- optional mutable dict that receives
            progress counters (``xdxr_num`` / ``xdxr_total``)
    """
    client.drop_collection('stock_xdxr')
    stock_list = QA_fetch_get_stock_time_to_market()
    coll = client.stock_xdxr
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== %s' % (str(code)))
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    QA_fetch_get_stock_xdxr(str(code))))
        except Exception:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info('The %s of Total %s' % (i_, len(stock_list)))
        QA_util_log_info(
            'DOWNLOAD PROGRESS %s ' %
            str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        __saving_work(stock_list.index[i_], coll)
        if PROGRESS_INFO:
            PROGRESS_INFO["xdxr_num"] = i_ + 1
            PROGRESS_INFO["xdxr_total"] = len(stock_list)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        # One retry pass over the codes that failed the first time.
        # Rebinding `err` means the closure now appends to the fresh list.
        try_code = err
        err = []
        QA_util_log_info('Try to get stock xdxr info in error list! \n')
        for i__ in range(len(try_code)):
            QA_util_log_info('The %s of Total %s' % (i__, len(try_code)))
            QA_util_log_info(
                'DOWNLOAD PROGRESS %s ' %
                str(float(i__ / len(try_code) * 100))[0:4] + '%')
            __saving_work(try_code[i__], coll)
        if len(err) < 1:
            QA_util_log_info('SUCCESS')
        else:
            QA_util_log_info('ERROR CODE \n ')
            QA_util_log_info(err)
def __saving_work(code, coll):
    """Fetch basic stock info for *code* and insert it into *coll*.

    Failing codes are appended to the enclosing ``err`` list.
    """
    QA_util_log_info('##JOB010 Now Saving STOCK INFO ==== %s' % (str(code)))
    try:
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_info(str(code))))
    except Exception:
        err.append(str(code))
def __saving_work(code):
    """Fetch all transactions for *code* since its listing date and insert.

    Reads the listing date from the enclosing ``stock_list``; failing
    codes are appended to the enclosing ``err`` list.
    """
    QA_util_log_info(
        '##JOB10 Now Saving STOCK_TRANSACTION ==== %s' % (str(code)))
    try:
        coll.insert_many(
            QA_util_to_json_from_pandas(
                QA_fetch_get_stock_transaction(
                    str(code), str(stock_list[code]),
                    str(now_time())[0:10])))
    except Exception:
        err.append(str(code))
def save_select_result(code, select_result, para):
    """Record that *code* was selected with parameter *para*.

    If *code* is not yet present in *select_result*, appends a new entry
    stamped with the latest index date; otherwise appends *para* to the
    existing entry's ``parameter`` list. Mutates *select_result* in place.
    """
    select_date = select_result_dao.get_last_index_date()
    existing = check_result_contains(code, select_result)
    if existing is None:
        select_result.append({
            "code": code,
            "date": select_date,
            "parameter": [para],
        })
    else:
        existing["parameter"].append(para)
    QA_util_log_info(select_result)
def QA_fetch_get_stock_transaction(code, start, end, retry=2, ip=best_ip,
                                   port=7709):
    """Fetch historical tick-by-tick transactions for *code*.

    ``buyorsell``: 1 -- sell, 0 -- buy, 2 -- pre-market.

    Iterates every trading day in [start, end], fetching each day via the
    TDX API. Returns a DataFrame with ``datetime`` truncated to seconds,
    or None when the date range is invalid, any trading day yields no
    ticks, or nothing was fetched.
    """
    api = TdxHq_API()
    real_start, real_end = QA_util_get_real_datelist(start, end)
    if real_start is None:
        return None
    frames = []  # per-day DataFrames, concatenated once at the end
    with api.connect(ip['stock'], port):
        for index_ in range(trade_date_sse.index(real_start),
                            trade_date_sse.index(real_end) + 1):
            try:
                data_ = __QA_fetch_get_stock_transaction(
                    code, trade_date_sse[index_], retry, api)
                if len(data_) < 1:
                    # an empty trading day aborts the whole fetch
                    return None
            except Exception:
                QA_util_log_info(
                    'Wrong in Getting %s history transaction data in day %s'
                    % (code, trade_date_sse[index_]))
            else:
                QA_util_log_info(
                    'Successfully Getting %s history transaction data in day %s'
                    % (code, trade_date_sse[index_]))
                frames.append(data_)
        if frames:
            data = pd.concat(frames)
            return data.assign(
                datetime=data['datetime'].apply(lambda x: str(x)[0:19]))
        return None
def __saving_work(code, coll_stock_day):
    """Incrementally download daily bars for *code* and insert into mongo.

    Resumes from the last stored ``date`` (or 1990-01-01 for a new code);
    when resuming, the first fetched row duplicates the stored last bar
    and is skipped. Failing codes go into the enclosing ``err`` list.
    """
    try:
        QA_util_log_info('##JOB01 Now Saving STOCK_DAY==== %s' % (str(code)))
        ref = coll_stock_day.find({'code': str(code)[0:6]})
        end_date = str(now_time())[0:10]
        if ref.count() > 0:
            # A freshly listed stock may have no stored rows, which would
            # make the count()-1 index fail -- hence this guard.
            start_date = ref[ref.count() - 1]['date']
            slice_from = 1
        else:
            start_date = '1990-01-01'
            slice_from = 0
        QA_util_log_info(
            ' UPDATE_STOCK_DAY \n Trying updating %s from %s to %s' %
            (code, start_date, end_date))
        if start_date != end_date:
            temp_day_data = QA_fetch_get_stock_day(str(code), start_date,
                                                   end_date, '00')
            # `DataFrame.columns.contains` is deprecated/removed in pandas;
            # plain `in` is the supported membership test.
            if "date_stamp" in temp_day_data.columns:
                temp_day_data = temp_day_data.drop("date_stamp", axis=1)
            coll_stock_day.insert_many(
                QA_util_to_json_from_pandas(temp_day_data[slice_from:]))
    except Exception:
        err.append(str(code))
        QA_util_log_info(' UPDATE_STOCK_DAY \n Error %s ' % (code, ))
def QA_SU_save_stock_list(client=DATABASE):
    """save stock_list

    Rebuilds the ``stock_list`` collection from scratch. Best-effort:
    a failure is logged rather than raised, since the collection has
    already been dropped by the time the fetch runs.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    client.drop_collection('stock_list')
    coll = client.stock_list
    coll.create_index('code')
    err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_list()))
    except Exception as e:
        # was a silent `except: pass` -- keep best-effort but log why
        QA_util_log_info('ERROR in saving stock_list: %s' % e)
def start_select_stock():
    """Run MACD/KDJ stock selection over every code using a thread pool.

    Skips entirely when no selection is needed; otherwise fans out both
    selectors per code, then persists and logs any accumulated results.
    """
    if not need_to_select_stock_check():
        print("不需要选股")
        return
    manager = Manager()
    select_result = manager.list()  # shared, process/thread-safe result list
    code_list = QA_fetch_stock_list_adv()
    code_list = code_list["code"]
    threadPool = ThreadPool(5)
    for item in code_list:
        threadPool.apply_async(MACD_select, [item, select_result])
        threadPool.apply_async(KDJ_select, [item, select_result])
    threadPool.close()
    threadPool.join()
    if len(select_result) > 0:
        select_result_dao.save_select_result(select_result)
        print(select_result)
        QA_util_log_info(select_result)
def QA_fetch_get_stock_block(ip=best_ip, port=7709):
    """Fetch TDX block membership data (板块数据).

    Concatenates the four TDX block files, tags each with its type, and
    returns a de-duplicated DataFrame indexed by code; logs and returns
    None when the combined result looks too small to be valid.
    """
    api = TdxHq_API()
    # (block file, type tag) pairs fetched from the TDX server
    block_files = [
        ("block_gn.dat", 'gn'),
        ("block.dat", 'yb'),
        ("block_zs.dat", 'zs'),
        ("block_fg.dat", 'fg'),
    ]
    with api.connect(ip['stock'], port):
        frames = [
            api.to_df(api.get_and_parse_block_info(fname)).assign(type=tag)
            for fname, tag in block_files
        ]
        data = pd.concat(frames)
        if len(data) > 10:
            return data.assign(source='tdx').drop(
                ['block_type', 'code_index'],
                axis=1).set_index('code', drop=False,
                                  inplace=False).drop_duplicates()
        else:
            QA_util_log_info('Wrong with fetch block ')
def QA_SU_save_stock_day(client=DATABASE, PROGRESS_INFO=None):
    """save stock_day

    Incrementally downloads daily bars for every listed stock into
    ``client.stock_day``, resuming each code from its last stored date.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
        PROGRESS_INFO {dict} -- optional mutable dict that receives
            progress counters (``stock_day_num`` / ``stock_day_total``)
    """
    stock_list = QA_fetch_get_stock_time_to_market()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING),
                                 ("date", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        # Resume from the last stored date (or 1990-01-01 for a new code);
        # when resuming, the first fetched row duplicates the stored bar.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_DAY==== %s' % (str(code)))
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # guard against the negative index a brand-new stock
                # with no stored rows would otherwise hit
                start_date = ref[ref.count() - 1]['date']
                slice_from = 1
            else:
                start_date = '1990-01-01'
                slice_from = 0
            QA_util_log_info(
                ' UPDATE_STOCK_DAY \n Trying updating %s from %s to %s' %
                (code, start_date, end_date))
            if start_date != end_date:
                temp_day_data = QA_fetch_get_stock_day(
                    str(code), start_date, end_date, '00')
                # `columns.contains` is deprecated/removed in pandas;
                # `in` is the supported membership test
                if "date_stamp" in temp_day_data.columns:
                    temp_day_data = temp_day_data.drop("date_stamp", axis=1)
                coll_stock_day.insert_many(
                    QA_util_to_json_from_pandas(temp_day_data[slice_from:]))
        except Exception:
            err.append(str(code))
            QA_util_log_info(' UPDATE_STOCK_DAY \n Error %s ' % (code, ))

    for item in range(len(stock_list)):
        QA_util_log_info('The %s of Total %s' % (item, len(stock_list)))
        QA_util_log_info(
            'DOWNLOAD PROGRESS %s ' %
            str(float(item / len(stock_list) * 100))[0:4] + '%')
        __saving_work(stock_list.index[item], coll_stock_day)
        if PROGRESS_INFO:
            PROGRESS_INFO["stock_day_num"] = item + 1
            PROGRESS_INFO["stock_day_total"] = len(stock_list)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_etf_min(client=DATABASE):
    """save etf_min

    Fans out minute-bar downloads for every ETF code over a small thread
    pool and reports progress as futures complete.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING),
                       ('time_stamp', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []
    freqs = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        # Resume each frequency from its last stored datetime (or
        # 2015-01-01 for a new code); when resuming, the first fetched
        # row duplicates the stored last bar and is skipped.
        QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== %s' % (str(code)))
        try:
            for freq in freqs:  # renamed from `type` (shadows builtin)
                ref_ = coll.find({'code': str(code)[0:6], 'type': freq})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    start_time = ref_[ref_.count() - 1]['datetime']
                    slice_from = 1
                else:
                    start_time = '2015-01-01'
                    slice_from = 0
                QA_util_log_info(
                    '##JOB07.%s Now Saving %s from %s to %s ==%s ' %
                    (freqs.index(freq), str(code), start_time, end_time,
                     freq))
                if start_time != end_time:
                    __data = QA_fetch_get_index_min(str(code), start_time,
                                                    end_time, freq)
                    if len(__data) > 1:
                        coll.insert_many(
                            QA_util_to_json_from_pandas(
                                __data[slice_from:]))
        except Exception:
            err.append(code)

    # context manager ensures the executor is shut down (original leaked it)
    with ThreadPoolExecutor(max_workers=4) as executor:
        # multi-index: element 0 of each index tuple is the code
        res = {
            executor.submit(__saving_work, __index_list.index[i_][0], coll)
            for i_ in range(len(__index_list))
        }
        count = 0
        for _ in concurrent.futures.as_completed(res):
            QA_util_log_info(
                'The %s of Total %s' % (count, len(__index_list)))
            QA_util_log_info(
                'DOWNLOAD PROGRESS %s ' %
                str(float(count / len(__index_list) * 100))[0:4] + '%')
            count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)