def QA_SU_save_stock_transaction(client=DATABASE):
    """save stock_transaction

    Downloads the full tick/transaction history for every listed stock and
    bulk-inserts it into the ``stock_transaction`` collection.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_transaction
    coll.create_index('code')
    err = []

    def __saving_work(code):
        QA_util_log_info('##JOB10 Now Saving STOCK_TRANSACTION ==== {}'.format(
            str(code)))
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # fetch from the earliest supported date up to today.
                    # FIX: the old code passed str(stock_list[code]) as the
                    # start date — indexing the code list with a code string
                    # raised TypeError, so every code ended up in `err`.
                    QA_fetch_get_stock_transaction(str(code),
                                                   '1990-01-01',
                                                   str(now_time())[0:10])))
        except Exception:
            err.append(str(code))

    for i_, code in enumerate(stock_list):
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%'))
        __saving_work(code)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info(' ERROR CODE \n ')
        QA_util_log_info(err)
def get_today_all(output='pd'):
    """Fetch today's daily bar for every listed stock.

    Arguments:
        output {str} -- 'pd' returns a plain DataFrame, 'QAD' wraps it in a
            QA_DataStruct_Stock_day (default: {'pd'})

    Returns:
        DataFrame / QA_DataStruct_Stock_day / None -- None when no code
        returned any data (e.g. a non-trading day).
    """
    data = []
    today = str(datetime.date.today())
    codes = QA_fetch_get_stock_list('stock').code.tolist()
    bestip = select_best_ip()['stock']
    for code in codes:
        try:
            l = QA_fetch_get_stock_day(code, today, today, '00', ip=bestip)
        except Exception:
            # current server failed: re-select the best ip and retry once
            bestip = select_best_ip()['stock']
            l = QA_fetch_get_stock_day(code, today, today, '00', ip=bestip)
        if l is not None:
            data.append(l)
    # FIX: pd.concat([]) raises ValueError — guard the empty case
    if not data:
        return None
    res = pd.concat(data)
    if output in ['pd']:
        return res
    elif output in ['QAD']:
        return QA_DataStruct_Stock_day(
            res.set_index(['date', 'code'], drop=False))
def QA_SU_save_index_day(client=QA_Setting.client):
    """Incrementally save daily index bars into ``quantaxis.index_day``.

    For each index code, resumes from the last stored date (or 1990-01-01
    for new codes) and appends the missing rows.
    """
    __index_list = QA_fetch_get_stock_list('index')
    __coll = client.quantaxis.index_day
    # NOTE(review): ensure_index is deprecated in pymongo 3.x — presumably
    # this is legacy code; confirm the installed pymongo still supports it.
    __coll.ensure_index('code')
    __err = []

    def __saving_work(code, __coll):
        try:
            ref_ = __coll.find({'code': str(code)[0:6]})
            end_time = end_date = str(now_time())[0:10]
            if ref_.count() > 0:
                # last stored row defines where the update resumes
                start_time = ref_[ref_.count() - 1]['date']
            else:
                start_time = '1990-01-01'
            QA_util_log_info(
                '##JOB04 Now Saving INDEX_DAY==== \n Trying updating %s from %s to %s' % (code, start_time, end_time))
            if start_time != end_time:
                # [1::] drops the first fetched row, which duplicates the
                # bar already stored at start_time
                __coll.insert_many(
                    QA_util_to_json_from_pandas(
                        QA_fetch_get_index_day(str(code), start_time,
                                               end_time)[1::]))
        except:
            # any failure just records the code; no retry here
            __err.append(str(code))

    for i_ in range(len(__index_list)):
        #__saving_work('000001')
        QA_util_log_info('The %s of Total %s' % (i_, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' %
                         str(float(i_ / len(__index_list) * 100))[0:4] + '%')
        # __index_list is indexed by (code, ...) tuples, hence the [0]
        __saving_work(__index_list.index[i_][0], __coll)
def QA_SU_save_stock_list(client=DATABASE, ui_log=None, ui_progress=None):
    """Rebuild the ``stock_list`` collection from a fresh TDX fetch.

    Drops any existing collection, recreates the ``code`` index and
    bulk-inserts the current stock list, reporting progress to the
    optional GUI sinks.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    client.drop_collection('stock_list')
    coll = client.stock_list
    coll.create_index('code')
    err = []
    try:
        # TODO: this should really be the first job (JOB01) — refresh the
        # stock list before anything else.
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====',
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=5000)
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_list()))
        QA_util_log_info("完成股票列表获取",
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=10000)
    except Exception as e:
        QA_util_log_info(e, ui_log=ui_log)
        print(" Error save_tdx.QA_SU_save_stock_list exception!")
def get_list(type_=''):
    """Return a ``(codes, frequence)`` pair for the given ``type_``.

    ``type_`` is expected to look like '<market>_<frequence>'
    (e.g. 'stock_day', 'future_min', 'index_list'); '<market>' must be one
    of stock/index/etf/future.  When '<frequence>' is 'list' the raw
    dataframe is returned, otherwise a plain list of codes.

    Returns:
        tuple -- (list-or-DataFrame, frequence str), or (None, None) for
        malformed or unknown input.
    """
    type_list = ['stock', 'index', 'etf', 'future']
    '''todo: option list'''
    parts = type_.split('_')
    # FIX: the old code did parts[1] unconditionally, so any input without
    # an '_' (including the default '') raised IndexError
    if len(parts) < 2:
        return None, None
    frequence = parts[1]
    if parts[0] in type_list:
        if parts[0] == 'future':
            lst = QA_fetch_get_future_list()
            lst = lst if frequence == 'list' else lst.code.unique().tolist()
            if 'all' not in parts and frequence != 'list':
                # keep only continuous main contracts (codes ending L8/L9)
                lst = [item for item in lst if str(item)[-2:] in ['L8', 'L9']]
        else:
            lst = QA_fetch_get_stock_list(type_=parts[0])
            lst = lst if frequence == 'list' else lst.code.unique().tolist()
        if len(lst) > 0:
            return lst, frequence
        else:
            return None, None
    return None, None
def QA_SU_save_stock_xdxr(client=DATABASE, ui_log=None, ui_progress=None):
    """Rebuild the ``stock_xdxr`` (ex-dividend/ex-rights) collection.

    The collection is dropped and refilled code by code; codes that fail on
    the first pass are retried once in a second pass.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    client.drop_collection('stock_xdxr')
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_xdxr
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        # Fetch and insert the full XDXR table for one code; failures are
        # appended to whatever list `err` currently names (this matters for
        # the retry pass below, which rebinds `err`).
        QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== {}'.format(
            str(code)), ui_log=ui_log)
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(QA_fetch_get_stock_xdxr(
                    str(code))))
        except:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)),
                         ui_log=ui_log)
        strLogInfo = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(stock_list) * 100))
        QA_util_log_info(strLogInfo, ui_log=ui_log, ui_progress=ui_progress,
                         ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_], coll)
    if len(err) < 1:
        QA_util_log_info('^_SUCCESS_^', ui_log=ui_log)
    else:
        # retry pass: rebind `err` so the closure collects fresh failures
        try_code = err
        err = []
        QA_util_log_info('Try to get stock xdxr info in erro list! \n',
                         ui_log=ui_log)
        for i__ in range(len(try_code)):
            QA_util_log_info('The {} of Total {}'.format(i__, len(try_code)),
                             ui_log=ui_log)
            QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
                str(float(i__ / len(try_code) * 100))[0:4] + '%'),
                ui_log=ui_log)
            __saving_work(try_code[i__], coll)
        if len(err) < 1:
            QA_util_log_info('^_SUCCESS^_', ui_log=ui_log)
        else:
            QA_util_log_info('^_ ERROR CODE ^_\n ', ui_log=ui_log)
            QA_util_log_info(err, ui_log=ui_log)
def get(self):
    """Write every tradable code as a 'code/name/market' string list."""
    # stock and future lists, each tagged with its market label
    stocks = QA_fetch_get_stock_list().assign(market='stock_cn')
    futures = QA_fetch_get_future_list().assign(market='future_cn')
    currentlist = pd.concat([stocks, futures], sort=False)
    joined = (currentlist.code + '/' + currentlist.name + '/' +
              currentlist.market)
    self.write({'result': joined.tolist()})
def QA_SU_save_stock_year(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_year

    Incrementally saves yearly K-line bars per stock: resumes from the last
    stored date, or downloads everything since 1990-01-01 for new codes.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_year = client.stock_year
    coll_stock_year.create_index(
        [("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_year):
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_YEAR==== {}'.format(str(code)),
                ui_log=ui_log)
            ref = coll_stock_year.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            if ref.count() > 0:
                # guard: a freshly listed stock has no stored rows, which
                # would make the [ref.count() - 1] index below go negative
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info('UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'.format
                                 (code, start_date, end_date),
                                 ui_log=ui_log)
                if start_date != end_date:
                    # resume the day after the last stored bar
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code),
                                                   QA_util_get_next_day(start_date),
                                                   end_date,
                                                   '00',
                                                   frequence='year')))
            else:
                # nothing stored yet: full history download
                start_date = '1990-01-01'
                QA_util_log_info('UPDATE_STOCK_YEAR \n Trying updating {} from {} to {}'.format
                                 (code, start_date, end_date),
                                 ui_log=ui_log)
                if start_date != end_date:
                    coll_stock_year.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code),
                                                   start_date,
                                                   end_date,
                                                   '00',
                                                   frequence='year')))
        except:
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(
            item, len(stock_list)), ui_log=ui_log)
        strProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%')
        intProgress = int(float(item / len(stock_list) * 100))
        QA_util_log_info(strProgress, ui_log=ui_log, ui_progress=ui_progress,
                         ui_progress_int_value=intProgress)
        __saving_work(stock_list[item], coll_stock_year)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_etf_day(client=DATABASE):
    """save etf_day

    Incrementally saves daily ETF bars.  Note that ETF day bars are stored
    in the ``index_day`` collection, not a dedicated one.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_day
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # resume the day after the last stored bar
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code, start_time, end_time))
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(
                                str(code),
                                QA_util_get_next_day(start_time),
                                end_time)))
            else:
                # nothing stored yet: full history download
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating {} from {} to {}'
                    .format(code, start_time, end_time))
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), start_time,
                                                   end_time)))
        except:
            err.append(str(code))

    for i_ in range(len(__index_list)):
        #__saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(__index_list) * 100))[0:4] + '%'))
        # __index_list is indexed by (code, ...) tuples, hence the [0]
        __saving_work(__index_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info(' ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_etf_min(client=DATABASE):
    """Incrementally save ETF minute bars (1/5/15/30/60min) in parallel.

    ETF minute bars are stored in the ``index_min`` collection.  Work is
    fanned out over a 4-thread pool, one task per ETF code.
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING),
                       ('time_stamp', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== %s' % (str(code)))
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find(
                    {'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored datetime for this frequency
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.%s Now Saving %s from %s to %s ==%s ' %
                        (['1min', '5min', '15min', '30min',
                          '60min'].index(type), str(code), start_time,
                         end_time, type))
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            # [1::] skips the bar already stored at start_time
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::]))
                else:
                    # no rows yet: minute history starts at 2015-01-01
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.%s Now Saving %s from %s to %s ==%s ' %
                        (['1min', '5min', '15min', '30min',
                          '60min'].index(type), str(code), start_time,
                         end_time, type))
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    # one future per ETF; __index_list is multi-indexed, hence index[i_][0]
    res = {executor.submit(
        __saving_work, __index_list.index[i_][0], coll)
        for i_ in range(len(__index_list))}  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        # progress is counted by completion order, not by code
        QA_util_log_info('The %s of Total %s' % (count, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' % str(
            float(count / len(__index_list) * 100))[0:4] + '%')
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_stock_list(client=DATABASE):
    """Drop and rebuild ``stock_list``; any failure is silently ignored."""
    client.drop_collection('stock_list')
    listing = client.stock_list
    listing.create_index('code')
    err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        fresh = QA_fetch_get_stock_list()
        listing.insert_many(QA_util_to_json_from_pandas(fresh))
    except:
        pass
def QA_SU_save_stock_list(client=QA_Setting.client):
    """Rebuild ``quantaxis.stock_list``; any failure is silently ignored."""
    db = client.quantaxis
    db.drop_collection('stock_list')
    stock_coll = db.stock_list
    stock_coll.ensure_index('code')
    __err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        stock_coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_list()))
    except:
        pass
def QA_SU_save_etf_day(client=QA_Setting.client):
    """Incrementally save daily ETF bars into ``quantaxis.index_day``."""
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.quantaxis.index_day
    coll.create_index([('code', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        try:
            ref_ = coll.find({'code': str(code)[0:6]})
            end_time = str(now_time())[0:10]
            if ref_.count() > 0:
                # resume from the last stored date
                start_time = ref_[ref_.count() - 1]['date']
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating %s from %s to %s' % (code, start_time, end_time))
                if start_time != end_time:
                    # [1::] drops the first fetched row, which duplicates
                    # the bar already stored at start_time
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), start_time,
                                                   end_time)[1::]))
            else:
                # nothing stored yet: full history download
                start_time = '1990-01-01'
                QA_util_log_info(
                    '##JOB06 Now Saving ETF_DAY==== \n Trying updating %s from %s to %s' % (code, start_time, end_time))
                if start_time != end_time:
                    coll.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_index_day(str(code), start_time,
                                                   end_time)))
        except:
            err.append(str(code))

    for i_ in range(len(__index_list)):
        #__saving_work('000001')
        QA_util_log_info('The %s of Total %s' % (i_, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' %
                         str(float(i_ / len(__index_list) * 100))[0:4] + '%')
        # __index_list is indexed by (code, ...) tuples, hence the [0]
        __saving_work(__index_list.index[i_][0], coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def _QA_SU_save_index_or_etf_day(index__or_etf, client, ui_log, ui_progress):
    # Shared driver for saving daily index/ETF bars via the multiprocessing
    # parallelism helper.  `index__or_etf` is the list type passed straight
    # to QA_fetch_get_stock_list ('index' or 'etf').
    index_list = QA_fetch_get_stock_list(index__or_etf).code.tolist()
    # NOTE(review): `coll` is not used below — presumably get_coll has the
    # side effect of preparing the collection; confirm before removing.
    coll = get_coll(client)
    # pick the fastest-responding TDX servers, at most 2*cores+1 of them
    ips = get_ip_list_by_multi_process_ping(stock_ip_list, _type='stock')[
        :cpu_count() * 2 + 1]
    # one worker per core, capped by the number of usable servers
    ps = QA_SU_save_index_day_parallelism(
        processes=cpu_count() if len(ips) >= cpu_count() else len(ips),
        client=client, ui_log=ui_log)
    # single-process variant kept for debugging:
    # ps = QA_SU_save_index_day_parallelism(
    #     processes=1 if len(ips) >= cpu_count() else len(ips),
    #     client=client, ui_log=ui_log)
    ps.total_counts = len(index_list)
    ps.run(index_list)
def QA_SU_save_index_min(client=QA_Setting.client):
    """Incrementally save index minute bars into ``quantaxis.index_min``.

    All five frequencies (1/5/15/30/60min) are updated per code, fanned out
    over a 4-thread pool.
    """
    __index_list = QA_fetch_get_stock_list('index')
    __coll = client.quantaxis.index_min
    # NOTE(review): ensure_index is deprecated in pymongo 3.x
    __coll.ensure_index('code')
    __err = []

    def __saving_work(code, __coll):
        QA_util_log_info('##JOB05 Now Saving Index_MIN ==== %s' % (str(code)))
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = __coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(datetime.datetime.now())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored datetime
                    start_time = ref_[ref_.count() - 1]['datetime']
                else:
                    # minute history starts at 2015-01-01
                    start_time = '2015-01-01'
                QA_util_log_info(
                    '##JOB05.%s Now Saving %s from %s to %s ==%s ' %
                    (['1min', '5min', '15min', '30min', '60min'
                      ].index(type), str(code), start_time, end_time, type))
                if start_time != end_time:
                    __data = QA_fetch_get_index_min(str(code), start_time,
                                                    end_time, type)
                    if len(__data) > 1:
                        # [1::] skips the bar already stored at start_time
                        __coll.insert_many(
                            QA_util_to_json_from_pandas(__data[1::]))
        except:
            __err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    # one future per index; __index_list is multi-indexed, hence index[i_][0]
    res = {
        executor.submit(__saving_work, __index_list.index[i_][0], __coll)
        for i_ in range(len(__index_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        # progress counts completions, not specific codes
        QA_util_log_info('The %s of Total %s' % (count, len(__index_list)))
        QA_util_log_info('DOWNLOAD PROGRESS %s ' %
                         str(float(count / len(__index_list) * 100))[0:4] + '%')
        count = count + 1
    # NOTE(review): the ERROR header is logged unconditionally, even when
    # __err is empty — confirm whether this is intended
    QA_util_log_info('ERROR CODE \n ')
    QA_util_log_info(__err)
def QA_SU_save_stock_xdxr(client=DATABASE, ui_log=None, ui_progress=None):
    """Save ex-dividend/ex-rights (XDXR) info, de-duplicated by unique index.

    A unique (code, date) index makes re-runs idempotent; inserts use
    ``ordered=False`` so duplicate-key errors don't abort the batch.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    # client.drop_collection('stock_xdxr')
    try:
        coll = client.stock_xdxr
        coll.create_index([('code', pymongo.ASCENDING),
                           ('date', pymongo.ASCENDING)], unique=True)
    except:
        # index creation failed (e.g. existing duplicates): rebuild from
        # scratch so the unique index can be created
        client.drop_collection('stock_xdxr')
        coll = client.stock_xdxr
        coll.create_index([('code', pymongo.ASCENDING),
                           ('date', pymongo.ASCENDING)], unique=True)
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB02 Now Saving XDXR INFO ==== {}'.format(
            str(code)), ui_log=ui_log)
        try:
            # ordered=False: keep inserting past duplicate-key errors
            coll.insert_many(QA_util_to_json_from_pandas(
                QA_fetch_get_stock_xdxr(str(code))),
                ordered=False)
        except:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)),
                         ui_log=ui_log)
        strLogInfo = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        intLogProgress = int(float(i_ / len(stock_list) * 100))
        QA_util_log_info(strLogInfo, ui_log=ui_log, ui_progress=ui_progress,
                         ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_], coll)
def QA_SU_save_stock_transaction(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_transaction

    Downloads the full transaction history (from 1990-01-01 to today) for
    every listed stock into the ``stock_transaction`` collection.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_transaction
    coll.create_index('code')
    err = []

    def __saving_work(code):
        QA_util_log_info('##JOB11 Now Saving STOCK_TRANSACTION ==== {}'.format(
            str(code)), ui_log=ui_log)
        try:
            coll.insert_many(
                QA_util_to_json_from_pandas(
                    # TODO: is the str(stock_list[code]) parameter wrong here?
                    # (original note; this variant already uses '1990-01-01')
                    QA_fetch_get_stock_transaction(str(code), '1990-01-01',
                                                   str(now_time())[0:10])))
        except:
            err.append(str(code))

    for i_ in range(len(stock_list)):
        # __saving_work('000001')
        QA_util_log_info('The {} of Total {}'.format(i_, len(stock_list)),
                         ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(i_ / len(stock_list) * 100))[0:4] + '%')
        # scaled to 0..10000 for the GUI progress bar
        intLogProgress = int(float(i_ / len(stock_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intLogProgress)
        __saving_work(stock_list[i_])
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_list(client=QA_Setting.client):
    """Rebuild ``quantaxis.stock_list`` and report success or failure.

    Drops the collection, recreates the ``code`` index and bulk-inserts a
    fresh stock list; the summary at the end reflects what actually
    happened.
    """
    client.quantaxis.drop_collection('stock_list')
    coll = client.quantaxis.stock_list
    coll.create_index('code')
    err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        coll.insert_many(QA_util_to_json_from_pandas(
            QA_fetch_get_stock_list()))
    except Exception as e:
        # FIX: the old code swallowed the exception without recording it,
        # so the summary below always logged SUCCESS even on failure
        err.append(repr(e))
    if len(err) < 1:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info('ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_stock_list(client=DATABASE):
    """save stock_list

    Drops and re-creates the ``stock_list`` collection from a fresh fetch;
    failures are silently ignored.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    client.drop_collection('stock_list')
    listing_coll = client.stock_list
    listing_coll.create_index('code')
    err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        payload = QA_util_to_json_from_pandas(QA_fetch_get_stock_list())
        listing_coll.insert_many(payload)
    except:
        pass
def QA_SU_save_stock_info(client=DATABASE, ui_log=None, ui_progress=None):
    """save stock_info

    Rebuilds the ``stock_info`` collection code by code, reporting progress
    to the optional GUI sinks and summarising failures at the end.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    client.drop_collection('stock_info')
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_info
    coll.create_index('code')
    err = []
    total = len(stock_list)

    def __saving_work(code, coll):
        QA_util_log_info('##JOB010 Now Saving STOCK INFO ==== {}'.format(
            str(code)), ui_log=ui_log)
        try:
            rows = QA_util_to_json_from_pandas(
                QA_fetch_get_stock_info(str(code)))
            coll.insert_many(rows)
        except:
            err.append(str(code))

    for pos, code in enumerate(stock_list):
        pct_text = 'DOWNLOAD PROGRESS {} '.format(
            str(float(pos / total * 100))[0:4] + '%')
        # scaled to 0..10000 for the GUI progress bar
        pct_int = int(float(pos / total * 10000.0))
        QA_util_log_info('The {} of Total {}'.format(pos, total))
        QA_util_log_info(pct_text,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=pct_int)
        __saving_work(code, coll)
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_xdxr(client=DATABASE, ui_log=None, ui_progress=None):
    """Save XDXR info for all stocks using the parallel saver.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    # prepare the target collections (xdxr + adjusted-price cache)
    coll = get_coll(client, "stock_xdxr", "stock_xdxr")
    coll_adj = get_coll(client, cacheName="stock_adj", tableName="stock_adj")
    # fastest-responding TDX servers, at most 2*cores+1 of them
    ips = get_ip_list_by_multi_process_ping(
        stock_ip_list, _type='stock')[:cpu_count() * 2 + 1]
    worker_count = cpu_count() if len(ips) >= cpu_count() else len(ips)
    saver = QA_SU_save_stock_xdxr_parallelism(processes=worker_count,
                                              client=client,
                                              ui_log=ui_log)
    saver.total_counts = len(stock_list)
    saver.run(stock_list)
def QA_SU_save_stock_list(client=DATABASE):
    """save stock_list

    Drops and rebuilds the ``stock_list`` collection; errors are reported
    to stdout but otherwise ignored.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    client.drop_collection('stock_list')
    coll = client.stock_list
    coll.create_index('code')
    err = []
    try:
        QA_util_log_info('##JOB08 Now Saving STOCK_LIST ====')
        coll.insert_many(
            QA_util_to_json_from_pandas(QA_fetch_get_stock_list()))
        QA_util_log_info("完成股票列表获取")
    except:
        print(" Error save_tdx.QA_SU_save_stock_list exception!")
def get(self): bond_list = QA_fetch_get_bond_list() #.assign(market='bond_cn'), ts_bond_list = QA_fetch_bond_list_adv(DATABASE.bond_list_ts) cbond = list(ts_bond_list[ts_bond_list.list_date > '2002-01-01'].code) missing_cb_l = [ cb for cb in cbond if cb[:6] not in list(bond_list.code) ] missing_cb = ts_bond_list[ts_bond_list.code.isin(missing_cb_l)][[ 'code', 'name', 'sse' ]].drop_duplicates() missing_cb.code = missing_cb.code.apply(lambda x: x[:6]) currentlist = pd.concat( [ QA_fetch_get_stock_list().assign(market='stock_cn'), QA_fetch_get_bond_list().assign(market='bond_cn'), missing_cb.assign(market='bond_cn'), QA_fetch_get_index_list().assign(market='index_cn'), # QA_fetch_get_hkstock_list().assign(market='stock_hk'), QA_fetch_get_future_list().assign(market='future_cn') ], sort=False) data = (currentlist.code + '/' + currentlist.name + '/' + currentlist.market).tolist() self.write({'result': data})
def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
     save stock_day — persist daily stock bars (parallel variant).

    :param client: mongo database handle
    :param ui_log: GUI (qt) log sink
    :param ui_progress: GUI (qt) progress sink
    :param ui_progress_int_value: GUI (qt) progress value
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING),
                                 ("date_stamp", pymongo.ASCENDING)])
    err = []  # saving result

    # Builds one task tuple per code for the parallel saver: codes are
    # round-robined over the available server ip/port pairs.
    # NOTE(review): ip_list=[] is a mutable default argument — harmless
    # here since it is never mutated, but worth cleaning up.
    def __gen_param(stock_list, coll_stock_day, ip_list=[]):
        results = []
        count = len(ip_list)
        total = len(stock_list)
        for item in range(len(stock_list)):
            try:
                code = stock_list[item]
                QA_util_log_info(
                    '##JOB01 Now Saving STOCK_DAY=== {}'.format(str(code)),
                    ui_log)
                # first check whether the database already holds this code
                search_cond = {'code': str(code)[0:6]}
                ref = coll_stock_day.find(search_cond)
                end_date = str(now_time())[0:10]
                ref_count = coll_stock_day.count_documents(search_cond)
                # data already present: incremental update (the guard also
                # avoids a negative index for freshly listed stocks)
                if ref_count > 0:
                    # resume from the last stored date
                    start_date = ref[ref_count - 1]['date']
                    # print("ref[ref.count() - 1]['date'] {} {}".format(ref.count(), coll_stock_day.count_documents({'code': str(code)[0:6]})))
                else:
                    # nothing stored yet: download everything since 1990-01-01
                    start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date), ui_log)
                if start_date != end_date:  # already up to date: skip
                    results.extend([(code, start_date, end_date, '00', 'day',
                                     ip_list[item % count]['ip'],
                                     ip_list[item % count]['port'], item,
                                     total, ui_log, ui_progress)])
            except Exception as error0:
                print('Exception:{}'.format(error0))
                err.append(code)
        return results

    # fastest-responding TDX servers, at most 2*cores+1 of them
    ips = get_ip_list_by_multi_process_ping(
        stock_ip_list, _type='stock')[:cpu_count() * 2 + 1]
    param = __gen_param(stock_list, coll_stock_day, ips)
    ps = QA_SU_save_stock_day_parallelism(
        processes=cpu_count() if len(ips) >= cpu_count() else len(ips),
        client=client, ui_log=ui_log)
    # single-process variant kept for debugging:
    # ps = QA_SU_save_stock_day_parallelism(
    #     processes=1 if len(ips) >= cpu_count() else len(ips),
    #     client=client, ui_log=ui_log)
    ps.run(do_saving_work, param)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info('ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_etf_min(client=DATABASE, ui_log=None, ui_progress=None):
    """save etf_min

    Incrementally saves ETF minute bars (1/5/15/30/60min) into the
    ``index_min`` collection, fanned out over a 4-thread pool.

    Keyword Arguments:
        client {[type]} -- mongo database handle (default: {DATABASE})
    """
    __index_list = QA_fetch_get_stock_list('etf')
    coll = client.index_min
    coll.create_index([('code', pymongo.ASCENDING),
                       ('time_stamp', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll):
        QA_util_log_info('##JOB07 Now Saving ETF_MIN ==== {}'.format(
            str(code)), ui_log=ui_log)
        try:
            for type in ['1min', '5min', '15min', '30min', '60min']:
                ref_ = coll.find({'code': str(code)[0:6], 'type': type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored datetime for this frequency
                    start_time = ref_[ref_.count() - 1]['datetime']
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min',
                             '60min'].index(type), str(code), start_time,
                            end_time, type), ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            # [1::] skips the bar already stored at start_time
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data[1::]))
                else:
                    # no rows yet: minute history starts at 2015-01-01
                    start_time = '2015-01-01'
                    QA_util_log_info(
                        '##JOB07.{} Now Saving {} from {} to {} =={} '.format(
                            ['1min', '5min', '15min', '30min',
                             '60min'].index(type), str(code), start_time,
                            end_time, type), ui_log=ui_log)
                    if start_time != end_time:
                        __data = QA_fetch_get_index_min(
                            str(code), start_time, end_time, type)
                        if len(__data) > 1:
                            coll.insert_many(
                                QA_util_to_json_from_pandas(__data))
        except:
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    # one future per ETF; __index_list is multi-indexed, hence index[i_][0]
    res = {
        executor.submit(__saving_work, __index_list.index[i_][0], coll)
        for i_ in range(len(__index_list))
    }  # multi index ./.
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        # progress counts completions, not specific codes
        QA_util_log_info('The {} of Total {}'.format(count,
                                                     len(__index_list)),
                         ui_log=ui_log)
        strLogProgress = 'DOWNLOAD PROGRESS {} '.format(
            str(float(count / len(__index_list) * 100))[0:4] + '%')
        # scaled to 0..10000 for the GUI progress bar
        intLogProgress = int(float(count / len(__index_list) * 10000.0))
        QA_util_log_info(strLogProgress, ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intLogProgress)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info('SUCCESS', ui_log=ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_day(client=DATABASE, ui_log=None, ui_progress=None):
    '''
     save stock_day — persist daily stock bars (serial, incremental).

    :param client: mongo database handle
    :param ui_log: GUI (qt) log sink
    :param ui_progress: GUI (qt) progress sink
    :param ui_progress_int_value: GUI (qt) progress value
    :return: None
    '''
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING),
                                 ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        try:
            QA_util_log_info(
                '##JOB01 Now Saving STOCK_DAY==== {}'.format(str(code)),
                ui_log)
            # first check whether the database already holds this code
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # data already present: incremental update (the guard also
            # avoids a negative index for freshly listed stocks)
            if ref.count() > 0:
                # resume the day after the last stored bar
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code), QA_util_get_next_day(start_date),
                                end_date, '00')))
            # nothing stored yet: download everything since 1990-01-01
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date), ui_log)
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date,
                                                   end_date, '00')))
        except Exception as error0:
            print(error0)
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)))
        # FIX: the old code formatted ui_log itself into the progress
        # message ('DOWNLOAD PROGRESS {} {}'.format(pct, ui_log)), which
        # polluted the log output; the sink belongs in the ui_log kwarg only
        strProgressToLog = 'DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%')
        intProgressToLog = int(float(item / len(stock_list) * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(stock_list[item], coll_stock_day)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_gm_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """
    Save stock minute bars via the gm (JueJin / 掘金量化) SDK.

    Incrementally updates 1min bars for every listed stock into the
    ``stock_min`` collection, fanned out over a 2-thread pool.
    """
    try:
        from gm.api import set_token
        from gm.api import history
        set_token("GMTOKEN")
    except:
        raise ModuleNotFoundError
    # format plain codes into gm symbols (SHSE./SZSE. prefix)
    code_list = list(
        map(
            lambda x: "SHSE." + x if x[0] == "6" else "SZSE." + x,
            QA_fetch_get_stock_list().code.unique().tolist(),
        ))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_gm_to_qa(df):
        """Convert a gm history dataframe into QA's stock_min layout.

        The QA code is derived from the gm symbol column ('SHSE.600000'
        -> '600000'); raises ValueError on empty input.
        """
        if df is None or len(df) == 0:
            raise ValueError("掘金数据转换时没有数据")
        df = df.rename(columns={
            "eob": "datetime",
            "volume": "vol",
            "symbol": "code"
        }).drop(["bob", "frequency", "position", "pre_close"], axis=1)
        df["code"] = df["code"].map(str).str.slice(5)
        df["datetime"] = pd.to_datetime(df["datetime"].map(str).str.slice(
            0, 19))
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_date_stamp(x)))
        df["type"] = "1min"
        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    # FIX: this worker was defined as __saveing_work while __saving_work
    # was submitted to the executor, so the whole function died with
    # NameError before saving anything.
    def __saving_work(code, coll):
        QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                         ui_log=ui_log)
        try:
            for type in ["1min"]:
                ref_ = coll.find({"code": str(code)[0:6], "type": type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored datetime
                    start_time = ref_[ref_.count() - 1]["datetime"]
                else:
                    # no rows yet: minute history starts here
                    start_time = "2015-01-01 09:30:00"
                QA_util_log_info(
                    "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                        ["1min"].index(type),
                        str(code)[0:6],
                        start_time,
                        end_time,
                        type,
                    ),
                    ui_log=ui_log,
                )
                if start_time != end_time:
                    df = history(
                        symbol=code,
                        start_time=start_time,
                        end_time=end_time,
                        frequency=type,
                    )
                    # FIX: __transform_gm_to_qa takes only the dataframe
                    # (the code column comes from the gm symbol); the old
                    # call passed an unsupported code= kwarg, and a stray
                    # `__data == ...` comparison duplicated the insert.
                    __data = __transform_gm_to_qa(df)
                    if len(__data) > 1:
                        # [1::] skips the record already stored at start_time
                        # NOTE(review): this also drops the first bar on a
                        # brand-new code — siblings only slice when resuming.
                        coll.insert_many(
                            QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    executor = ThreadPoolExecutor(max_workers=2)
    res = {
        executor.submit(__saving_work, code_list[i_], coll)
        for i_ in range(len(code_list))
    }
    count = 0
    for i_ in concurrent.futures.as_completed(res):
        # progress strings kept for when GUI logging is re-enabled
        strProgress = "DOWNLOAD PROGRESS {} ".format(
            str(float(count / len(code_list) * 100))[0:4] + "%")
        intProgress = int(count / len(code_list) * 10000.0)
        count = count + 1
    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_jq_save_stock_min(client=DATABASE, ui_log=None, ui_progress=None):
    """Save the current day's 1-min stock bars fetched through 聚宽 (jqdatasdk).

    Keyword Arguments:
        client -- mongodb client holding the ``stock_min`` collection
            (default: {DATABASE})
        ui_log -- optional UI log channel forwarded to QA_util_log_info
        ui_progress -- optional UI progress channel (currently unused)

    Fix over the previous revision: the first-download branch used
    ``__data == __transform_jq_to_qa(...)`` — a no-op comparison instead of an
    assignment — leaving ``__data`` unbound and raising NameError for every
    code not yet in the database. Both branches now share one fetch path.
    """
    try:
        import jqdatasdk
        jqdatasdk.auth("JQUSERNAME", "JQUSERPASSWD")
    except Exception:
        # jqdatasdk is an optional dependency (auth failure also lands here)
        raise ModuleNotFoundError

    # 股票代码格式化: map to joinquant's exchange suffixes
    code_list = list(
        map(
            lambda x: x + ".XSHG" if x[0] == "6" else x + ".XSHE",
            QA_fetch_get_stock_list().code.unique().tolist(),
        ))
    coll = client.stock_min
    coll.create_index([
        ("code", pymongo.ASCENDING),
        ("time_stamp", pymongo.ASCENDING),
        ("date_stamp", pymongo.ASCENDING),
    ])
    err = []

    def __transform_jq_to_qa(df, code):
        """Reshape a jqdatasdk minute frame into the QA stock_min schema.

        1. jqdatasdk format:
                             open  close   high    low     volume      money
        2018-12-03 09:31:00  10.59  10.61  10.61  10.59  8339100.0 88377836.0

        2. matched against QUANTAXIS.QAFetch.QATdx.QA_fetch_get_stock_min
           output (open/close/high/low/vol/amount/.../datetime columns).
        """
        if df is None or len(df) == 0:
            raise ValueError("输入 JQData 数据")

        df = df.reset_index().rename(columns={
            "index": "datetime",
            "volume": "vol",
            "money": "amount"
        })
        df["code"] = code
        df["date"] = df.datetime.map(str).str.slice(0, 10)
        df = df.set_index("datetime", drop=False)
        df["date_stamp"] = df["date"].apply(lambda x: QA_util_date_stamp(x))
        # NOTE(review): time_stamp is built with QA_util_date_stamp on the
        # full datetime string; kept as-is — confirm against the schema.
        df["time_stamp"] = (
            df["datetime"].map(str).apply(lambda x: QA_util_date_stamp(x)))
        df["type"] = "1min"

        return df[[
            "open",
            "close",
            "high",
            "low",
            "vol",
            "amount",
            "datetime",
            "code",
            "date",
            "date_stamp",
            "time_stamp",
            "type",
        ]]

    def __saving_work(code, coll):
        # Incrementally download and store 1-min bars for one jq security.
        QA_util_log_info("##JOB03 Now Saving STOCK_MIN ==== {}".format(code),
                         ui_log=ui_log)
        try:
            for type in ["1min"]:
                ref_ = coll.find({"code": str(code)[0:6], "type": type})
                end_time = str(now_time())[0:19]
                if ref_.count() > 0:
                    # resume from the last stored bar
                    start_time = ref_[ref_.count() - 1]["datetime"]
                else:
                    # nothing stored yet: full history from a fixed origin
                    start_time = "2015-01-01 09:30:00"
                QA_util_log_info(
                    "##JOB03.{} Now Saving {} from {} to {} == {}".format(
                        ["1min"].index(type),
                        str(code)[0:6],
                        start_time,
                        end_time,
                        type,
                    ),
                    ui_log=ui_log,
                )
                if start_time != end_time:
                    df = jqdatasdk.get_price(
                        security=code,
                        start_date=start_time,
                        end_date=end_time,
                        frequency=type[:2],  # "1min" -> "1m"
                    )
                    __data = __transform_jq_to_qa(df, code=code[:6])
                    if len(__data) > 1:
                        # skip the first row: it overlaps the last saved record
                        coll.insert_many(
                            QA_util_to_json_from_pandas(__data)[1::])
        except Exception as e:
            QA_util_log_info(e, ui_log=ui_log)
            err.append(code)
            QA_util_log_info(err, ui_log=ui_log)

    # fan the per-code downloads out over a small worker pool; the context
    # manager guarantees the pool is shut down when all futures complete
    with ThreadPoolExecutor(max_workers=2) as executor:
        res = {
            executor.submit(__saving_work, code_list[i_], coll)
            for i_ in range(len(code_list))
        }
        count = 0
        for i_ in concurrent.futures.as_completed(res):
            # progress values computed for a (currently disabled) UI callback
            strProgress = "DOWNLOAD PROGRESS {} ".format(
                str(float(count / len(code_list) * 100))[0:4] + "%")
            intProgress = int(count / len(code_list) * 10000.0)
            count = count + 1

    if len(err) < 1:
        QA_util_log_info("SUCCESS", ui_log=ui_log)
    else:
        QA_util_log_info(" ERROR CODE \n ", ui_log=ui_log)
        QA_util_log_info(err, ui_log=ui_log)
def QA_SU_save_stock_min(client=DATABASE):
    """save stock_min

    Downloads every minute frequency (1/5/15/30/60 min) for every listed
    stock and stores it incrementally into the ``stock_min`` collection.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll = client.stock_min
    coll.create_index([('code', pymongo.ASCENDING),
                       ('time_stamp', pymongo.ASCENDING),
                       ('date_stamp', pymongo.ASCENDING)])
    err = []
    frequencies = ['1min', '5min', '15min', '30min', '60min']

    def __saving_work(code, coll):
        # Per-code worker: for each frequency, resume from the last stored
        # bar (or fetch everything since 2015-01-01 on first download).
        QA_util_log_info('##JOB03 Now Saving STOCK_MIN ==== {}'.format(
            str(code)))
        try:
            for idx, freq in enumerate(frequencies):
                cursor = coll.find({'code': str(code)[0:6], 'type': freq})
                end_time = str(now_time())[0:19]
                fresh = cursor.count() == 0
                if fresh:
                    start_time = '2015-01-01'
                else:
                    start_time = cursor[cursor.count() - 1]['datetime']
                QA_util_log_info(
                    '##JOB03.{} Now Saving {} from {} to {} =={} '.format(
                        idx, str(code), start_time, end_time, freq))
                if start_time == end_time:
                    continue
                fetched = QA_fetch_get_stock_min(str(code), start_time,
                                                 end_time, freq)
                if len(fetched) > 1:
                    records = QA_util_to_json_from_pandas(fetched)
                    # on an incremental update the first record duplicates
                    # the last one already stored, so it is skipped
                    coll.insert_many(records if fresh else records[1::])
        except Exception as e:
            QA_util_log_info(e)
            err.append(code)

    executor = ThreadPoolExecutor(max_workers=4)
    futures = {
        executor.submit(__saving_work, code, coll)
        for code in stock_list
    }
    total = len(stock_list)
    for count, _ in enumerate(concurrent.futures.as_completed(futures)):
        QA_util_log_info('The {} of Total {}'.format(count, total))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(count / total * 100))[0:4] + '%'))
    if not err:
        QA_util_log_info('SUCCESS')
    else:
        QA_util_log_info(' ERROR CODE \n ')
        QA_util_log_info(err)
def QA_SU_save_stock_day(client=DATABASE):
    """save stock_day

    Incrementally downloads daily bars for every listed stock into the
    ``stock_day`` collection.

    Keyword Arguments:
        client {[type]} -- [description] (default: {DATABASE})

    Consistency fix: failures are now reported through QA_util_log_info like
    every other save job instead of a bare ``print``.
    """
    stock_list = QA_fetch_get_stock_list().code.unique().tolist()
    coll_stock_day = client.stock_day
    coll_stock_day.create_index([("code", pymongo.ASCENDING),
                                 ("date_stamp", pymongo.ASCENDING)])
    err = []

    def __saving_work(code, coll_stock_day):
        # Download and store daily bars for one code, resuming where the
        # database left off.
        try:
            QA_util_log_info('##JOB01 Now Saving STOCK_DAY==== {}'.format(
                str(code)))
            # 首选查找数据库 是否 有 这个代码的数据
            ref = coll_stock_day.find({'code': str(code)[0:6]})
            end_date = str(now_time())[0:10]
            # 当前数据库已经包含了这个代码的数据, 继续增量更新
            # 加入这个判断的原因是因为如果股票是刚上市的 数据库会没有数据 所以会有负索引问题出现
            if ref.count() > 0:
                # 接着上次获取的日期继续更新
                start_date = ref[ref.count() - 1]['date']
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date))
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(
                                str(code), QA_util_get_next_day(start_date),
                                end_date, '00')))
            # 当前数据库中没有这个代码的股票数据, 从1990-01-01 开始下载所有的数据
            else:
                start_date = '1990-01-01'
                QA_util_log_info(
                    'UPDATE_STOCK_DAY \n Trying updating {} from {} to {}'.
                    format(code, start_date, end_date))
                if start_date != end_date:
                    coll_stock_day.insert_many(
                        QA_util_to_json_from_pandas(
                            QA_fetch_get_stock_day(str(code), start_date,
                                                   end_date, '00')))
        except Exception as error0:
            # report through the shared log channel, consistent with the
            # other QA_SU_save_* jobs
            QA_util_log_info(error0)
            err.append(str(code))

    for item in range(len(stock_list)):
        QA_util_log_info('The {} of Total {}'.format(item, len(stock_list)))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(item / len(stock_list) * 100))[0:4] + '%'))
        __saving_work(stock_list[item], coll_stock_day)

    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock day ^_^')
    else:
        QA_util_log_info(' ERROR CODE \n ')
        QA_util_log_info(err)