def QA_ts_update_daily_basic():
    """
    Update the daily market-wide fundamental indicators (tushare daily_basic),
    used for stock screening and report display.
    """
    coll = DATABASE.daily_basic
    coll.create_index(
        [("code", ASCENDING), ("trade_date_stamp", ASCENDING)],
        unique=True,
    )
    coll.create_index([("trade_date_stamp", ASCENDING)])
    ref = coll.find({})
    cnt = coll.count()
    start_date = "1990-01-01"
    if cnt > 0:
        # resume from the last trade_date already stored in the collection
        start_date = ref[cnt - 1]["trade_date"]
    end_date = datetime.date.today().strftime("%Y-%m-%d")
    if end_date != start_date:
        start_trade_date = QA_util_get_next_trade_date(start_date)
        end_trade_date = QA_util_get_pre_trade_date(end_date)
    else:
        return
    for trade_date in trade_date_sse[trade_date_sse.index(start_trade_date):
                                     trade_date_sse.index(end_trade_date) + 1]:
        print(f"saving {trade_date} daily basic")
        df = QA_fetch_get_daily_basic(trade_date=trade_date)
        if df.empty:
            continue
        df = df.where(df.notnull(), None).reset_index()
        df["trade_date_stamp"] = df["trade_date"].apply(QA_util_date_stamp)
        js = QA_util_to_json_from_pandas(df)
        coll.insert_many(js)
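# Typical use (a sketch): run once after the market close to append the newest
# tushare daily_basic rows. Assumes DATABASE points at the local QUANTAXIS MongoDB
# and that the tushare token used by QA_fetch_get_daily_basic is already configured.
#
#     QA_ts_update_daily_basic()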
def _fetch_get_daily_basic(trade_date, fields, trial_count):
    nonlocal pro, max_trial
    try:
        if trial_count >= max_trial:
            raise ValueError("[ERROR]\tEXCEED MAX TRIAL!")
        if not trade_date:
            # default to the most recent finished trading day
            trade_date = QA_util_get_pre_trade_date(
                datetime.date.today(), 1).replace("-", "")
        else:
            trade_date = pd.Timestamp(trade_date).strftime("%Y%m%d")
        if not fields:
            qry = f"pro.daily_basic(trade_date={trade_date})"
        else:
            if isinstance(fields, str):
                fields = list(set([fields] + ["ts_code", "trade_date"]))
            fields = ",".join(fields)
            qry = f"pro.daily_basic(trade_date={trade_date}, fields='{fields}')"
        df = eval(qry)
        if df is None:
            raise ValueError("[ERROR]")
        return df
    except:
        # back off for the tushare rate limit, then retry
        time.sleep(61)
        return _fetch_get_daily_basic(
            trade_date, fields, trial_count + 1
        )
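# NOTE: _fetch_get_daily_basic declares `nonlocal pro, max_trial`, so it only works
# when nested inside a public wrapper that owns the tushare client and the retry
# budget. A minimal sketch of such a wrapper (the default max_trial value and the
# ts.pro_api() setup are assumptions, not part of this module):
#
#     import tushare as ts
#
#     def QA_fetch_get_daily_basic(trade_date=None, fields=None):
#         pro = ts.pro_api()   # tushare pro client, token configured globally
#         max_trial = 3        # assumed retry budget
#
#         def _fetch_get_daily_basic(trade_date, fields, trial_count):
#             nonlocal pro, max_trial
#             ...              # body as defined above
#
#         return _fetch_get_daily_basic(trade_date, fields, trial_count=0)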
def QA_fetch_get_index_indicator(code, start_date, end_date, type='day'):
    data = None
    if type == 'day':
        start = QA_util_get_pre_trade_date(start_date, 180)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA.QA_fetch_index_day(
                code, start, end_date,
                format='pd').reset_index(drop=True).set_index(['date', 'code'])
            data = QA_DataStruct_Stock_day(data)
        except:
            print("No data")
    elif type == 'week':
        start = QA_util_get_pre_trade_date(start_date, 187)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA.QA_fetch_index_day(
                code, start, end_date,
                format='pd').reset_index(drop=True).set_index(['date', 'code'])
            # resample the daily bars into 7-day OHLC bars
            data = QA_DataStruct_Stock_day(
                data.groupby('code', sort=True).apply(ohlc, 7))
        except:
            print("No data")
    elif type == 'month':
        start = QA_util_get_pre_trade_date(start_date, 210)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA.QA_fetch_index_day(
                code, start, end_date,
                format='pd').reset_index(drop=True).set_index(['date', 'code'])
            # resample the daily bars into 30-day OHLC bars
            data = QA_DataStruct_Stock_day(
                data.groupby('code', sort=True).apply(ohlc, 30))
        except:
            print("No data")
    if data is None:
        return None
    else:
        data = get_indicator(data, rng1)
        return data
def QA_fetch_financial_code(ndays=10):
    START = str(QA_util_get_pre_trade_date(QA_util_today_str(), ndays))
    code = list(
        QA_fetch_stock_financial_calendar(
            QA.QA_fetch_stock_list_adv().code.tolist(), start=START)['code'])
    market_day = pd.DataFrame(
        QA_fetch_stock_basic_info_tushare())[['code', 'timeToMarket']]
    market_day['TM'] = market_day['timeToMarket'].apply(lambda x: str(
        QA_util_add_months(QA_util_date_int2str(int(x)), 0)
        if x > 0 else None)[0:10])
    code = list(market_day[market_day['TM'] >= START]['code'].values) + code
    return code
def QA_fetch_get_stock_indicator(code, start_date, end_date, type='day'):
    data = None
    if type == 'day':
        start = QA_util_get_pre_trade_date(start_date, 200)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA_fetch_stock_day_adv(code, start, end_date)
            data = data.to_qfq()
        except:
            print("No data")
    elif type == 'week':
        start = QA_util_get_pre_trade_date(start_date, 200)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA_fetch_stock_day_adv(code, start, end_date)
            data = data.to_qfq()
            # resample the daily bars into 7-day OHLC bars
            data = QA_DataStruct_Stock_day(
                data.data.groupby('code', sort=True).apply(ohlc, 7))
        except:
            print("No data")
    elif type == 'month':
        start = QA_util_get_pre_trade_date(start_date, 220)
        rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
            lambda x: str(x)[0:10])
        try:
            data = QA_fetch_stock_day_adv(code, start, end_date)
            data = data.to_qfq()
            # resample the daily bars into 30-day OHLC bars
            data = QA_DataStruct_Stock_day(
                data.data.groupby('code', sort=True).apply(ohlc, 30))
        except:
            print("No data")
    if data is None:
        return None
    else:
        data = get_indicator(data, rng1)
        return data
def ETL_stock_day(codes, start=None, end=None):
    if start is None:
        start = '2008-01-01'
    if end is None:
        end = QA_util_today_str()
    if start != end:
        rng = pd.Series(pd.date_range(start, end, freq='D')).apply(
            lambda x: str(x)[0:10])
    else:
        rng = str(start)[0:10]
    # fetch extra history so rolling / percentage features have a warm-up window
    start_date = QA_util_get_pre_trade_date(start, 100)
    data = QA_fetch_stock_day_adv(codes, start_date, end)
    # join the raw bars with their forward-adjusted (qfq) counterparts
    res1 = data.to_qfq().data
    res1.columns = [x + '_qfq' for x in res1.columns]
    data = data.data.join(res1).fillna(0).reset_index()
    res = data.groupby('code').apply(pct)
    res = res.reset_index(level=0, drop=True).reset_index().set_index(
        ['date', 'code']).loc[rng].replace([np.inf, -np.inf], 0)
    res = res.where((pd.notnull(res)), None)
    return res
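# Example (a sketch): build the joined raw + forward-adjusted (qfq) feature frame
# for two codes. Assumes stock_day data has already been saved to the local
# QUANTAXIS MongoDB and that the `pct` helper used above is available in this module.
#
#     features = ETL_stock_day(['000001', '600000'], start='2020-01-01', end='2020-03-31')
#     print(features.head())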
def QA_fetch_stock_target(codes, start_date, end_date, type='close'):
    # make sure end_date is a trading day, then extend the fetch window by 5 days
    # so the forward-looking targets (TARGET3/4/5/10) can be computed
    if not QA_util_if_trade(end_date):
        end_date = QA_util_get_real_date(end_date)
    end = QA_util_get_next_datetime(end_date, 5)
    rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
        lambda x: str(x)[0:10])
    data = QA.QA_fetch_stock_day_adv(codes, start_date, end)
    market = QA.QA_fetch_index_day(['000001'], start_date, end,
                                   format='pd')['close'].reset_index()
    market = index_pct(market)[[
        'date', 'INDEX_TARGET', 'INDEX_TARGET3', 'INDEX_TARGET4',
        'INDEX_TARGET5', 'INDEX_TARGET10'
    ]]
    res1 = data.to_qfq().data
    res1.columns = [x + '_qfq' for x in res1.columns]
    data = data.data.join(res1).fillna(0).reset_index()
    res = data.groupby('code').apply(pct, type=type)[[
        'date', 'code', 'PRE_DATE', 'OPEN_MARK', 'PASS_MARK', 'TARGET',
        'TARGET3', 'TARGET4', 'TARGET5', 'TARGET10', 'AVG_TARGET'
    ]]
    res = pd.merge(res, market, on='date')
    res['date'] = res['date'].apply(lambda x: str(x)[0:10])
    res['next_date'] = res['date'].apply(
        lambda x: QA_util_get_pre_trade_date(x, -2))
    res['PRE_DATE'] = res['PRE_DATE'].apply(lambda x: str(x)[0:10])
    res = res.set_index(['date', 'code']).loc[rng1]
    # turn the index targets into excess returns relative to the index
    res['INDEX_TARGET'] = res['TARGET'] - res['INDEX_TARGET']
    res['INDEX_TARGET3'] = res['TARGET3'] - res['INDEX_TARGET3']
    res['INDEX_TARGET4'] = res['TARGET4'] - res['INDEX_TARGET4']
    res['INDEX_TARGET5'] = res['TARGET5'] - res['INDEX_TARGET5']
    res['INDEX_TARGET10'] = res['TARGET10'] - res['INDEX_TARGET10']
    # downcast to save memory and storage
    for columnname in res.columns:
        if res[columnname].dtype == 'float64':
            res[columnname] = res[columnname].astype('float16')
        if res[columnname].dtype == 'int64':
            res[columnname] = res[columnname].astype('int8')
    return res
def QA_fetch_get_index_quant_data(codes, start_date, end_date):
    """Fetch the final quant / machine-learning indicator set (V1) for indexes."""
    start = QA_util_get_pre_trade_date(start_date, 15)
    rng1 = pd.Series(pd.date_range(start_date, end_date, freq='D')).apply(
        lambda x: str(x)[0:10])
    # alpha factors
    alpha = QA_fetch_index_alpha_adv(codes, start, end_date).data[[
        'alpha_001', 'alpha_002', 'alpha_003', 'alpha_004', 'alpha_005',
        'alpha_006', 'alpha_007', 'alpha_008', 'alpha_009', 'alpha_010',
        'alpha_012', 'alpha_013', 'alpha_014', 'alpha_015', 'alpha_016',
        'alpha_017', 'alpha_018', 'alpha_019', 'alpha_020', 'alpha_021',
        'alpha_022', 'alpha_023', 'alpha_024', 'alpha_025', 'alpha_026',
        'alpha_028', 'alpha_029', 'alpha_031', 'alpha_032', 'alpha_033',
        'alpha_034', 'alpha_035', 'alpha_036', 'alpha_037', 'alpha_038',
        'alpha_039', 'alpha_040', 'alpha_041', 'alpha_042', 'alpha_044',
        'alpha_045', 'alpha_046', 'alpha_047', 'alpha_048', 'alpha_049',
        'alpha_052', 'alpha_053', 'alpha_054', 'alpha_055', 'alpha_056',
        'alpha_057', 'alpha_058', 'alpha_059', 'alpha_061', 'alpha_062',
        'alpha_063', 'alpha_064', 'alpha_065', 'alpha_066', 'alpha_067',
        'alpha_068', 'alpha_071', 'alpha_072', 'alpha_074', 'alpha_077',
        'alpha_078', 'alpha_080', 'alpha_082', 'alpha_083', 'alpha_085',
        'alpha_086', 'alpha_087', 'alpha_088', 'alpha_089', 'alpha_090',
        'alpha_091', 'alpha_092', 'alpha_093', 'alpha_096', 'alpha_098',
        'alpha_099', 'alpha_102', 'alpha_103', 'alpha_104', 'alpha_105',
        'alpha_106', 'alpha_107', 'alpha_108', 'alpha_109', 'alpha_113',
        'alpha_114', 'alpha_115', 'alpha_116', 'alpha_117', 'alpha_118',
        'alpha_119', 'alpha_120', 'alpha_122', 'alpha_123', 'alpha_124',
        'alpha_125', 'alpha_126', 'alpha_129', 'alpha_130', 'alpha_133',
        'alpha_134', 'alpha_135', 'alpha_138', 'alpha_139', 'alpha_141',
        'alpha_142', 'alpha_145', 'alpha_148', 'alpha_152', 'alpha_153',
        'alpha_156', 'alpha_158', 'alpha_159', 'alpha_160', 'alpha_161',
        'alpha_162', 'alpha_163', 'alpha_164', 'alpha_167', 'alpha_168',
        'alpha_169', 'alpha_170', 'alpha_171', 'alpha_172', 'alpha_173',
        'alpha_175', 'alpha_176', 'alpha_177', 'alpha_178', 'alpha_179',
        'alpha_184', 'alpha_185', 'alpha_186', 'alpha_187', 'alpha_188',
        'alpha_189', 'alpha_191'
    ]].loc[rng1]
    # downcast to save memory and storage
    for columnname in alpha.columns:
        if alpha[columnname].dtype == 'float64':
            alpha[columnname] = alpha[columnname].astype('float16')
        if alpha[columnname].dtype == 'int64':
            alpha[columnname] = alpha[columnname].astype('int8')
    # daily technical indicators (drop the PBX / PVT family)
    technical = QA_fetch_index_technical_index_adv(
        codes, start, end_date).data.drop([
            'PBX1', 'PBX1_C', 'PBX2', 'PBX2_C', 'PBX3', 'PBX3_C', 'PBX4',
            'PBX4_C', 'PBX5', 'PBX5_C', 'PBX6', 'PBX6_C', 'PBX_STD', 'PVT',
            'PVT_C'
        ], axis=1).loc[rng1]
    # weekly technical indicators, suffixed with _WK
    tech_week = QA_fetch_index_technical_index_adv(
        codes, start, end_date, 'week').data.drop([
            'PBX1', 'PBX1_C', 'PBX2', 'PBX2_C', 'PBX3', 'PBX3_C', 'PBX4',
            'PBX4_C', 'PBX5', 'PBX5_C', 'PBX6', 'PBX6_C', 'PBX_STD', 'PVT',
            'PVT_C'
        ], axis=1).loc[rng1]
    tech_week.columns = [x + '_WK' for x in tech_week.columns]
    technical = technical.join(tech_week)
    for columnname in technical.columns:
        if technical[columnname].dtype == 'float64':
            technical[columnname] = technical[columnname].astype('float16')
        if technical[columnname].dtype == 'int64':
            technical[columnname] = technical[columnname].astype('int8')
    res = technical.join(alpha)
    # the names below mark the signal / pattern columns (crosses, candlestick
    # patterns, trend flags); they are kept untouched below
    cols = [
        'AVG5_CR', 'AVG10_CR', 'AVG20_CR', 'AVG30_CR', 'AVG60_CR', 'AVG5_TR',
        'AVG10_TR', 'AVG20_TR', 'AVG30_TR', 'AVG60_TR', 'ADTM_CROSS1',
        'ADTM_CROSS2', 'ADX_CROSS1', 'ADX_CROSS2', 'BBI_CROSS1', 'BBI_CROSS2',
        'BIAS_CROSS1', 'BIAS_CROSS2', 'CCI_CROSS1', 'CCI_CROSS2', 'CCI_CROSS3',
        'CCI_CROSS4', 'CDL2CROWS', 'CDL3BLACKCROWS', 'CDL3INSIDE',
        'CDL3LINESTRIKE', 'CDL3OUTSIDE', 'CDL3STARSINSOUTH',
        'CDL3WHITESOLDIERS', 'CDLABANDONEDBABY', 'CDLADVANCEBLOCK',
        'CDLBELTHOLD', 'CDLBREAKAWAY', 'CDLCLOSINGMARUBOZU',
        'CDLCONCEALBABYSWALL', 'CDLCOUNTERATTACK', 'CDLDARKCLOUDCOVER',
        'CDLDOJI', 'CDLDOJISTAR', 'CDLDRAGONFLYDOJI', 'CDLENGULFING',
        'CDLEVENINGDOJISTAR', 'CDLEVENINGSTAR', 'CDLGAPSIDESIDEWHITE',
        'CDLGRAVESTONEDOJI', 'CDLHAMMER', 'CDLHANGINGMAN', 'CDLHARAMI',
        'CDLHARAMICROSS', 'CDLHIGHWAVE', 'CDLHIKKAKE', 'CDLHIKKAKEMOD',
        'CDLHOMINGPIGEON', 'CDLIDENTICAL3CROWS', 'CDLINNECK',
        'CDLINVERTEDHAMMER', 'CDLKICKING', 'CDLKICKINGBYLENGTH',
        'CDLLADDERBOTTOM', 'CDLLONGLEGGEDDOJI', 'CDLLONGLINE', 'CDLMARUBOZU',
        'CDLMATCHINGLOW', 'CDLMATHOLD', 'CDLMORNINGDOJISTAR', 'CDLMORNINGSTAR',
        'CDLONNECK', 'CDLPIERCING', 'CDLRICKSHAWMAN', 'CDLRISEFALL3METHODS',
        'CDLSEPARATINGLINES', 'CDLSHOOTINGSTAR', 'CDLSHORTLINE',
        'CDLSPINNINGTOP', 'CDLSTALLEDPATTERN', 'CDLSTICKSANDWICH', 'CDLTAKURI',
        'CDLTASUKIGAP', 'CDLTHRUSTING', 'CDLTRISTAR', 'CDLUNIQUE3RIVER',
        'CDLUPSIDEGAP2CROWS', 'CDLXSIDEGAP3METHODS', 'CHO_CROSS1',
        'CHO_CROSS2', 'CROSS_JC', 'CROSS_SC', 'DI_CROSS1', 'DI_CROSS2',
        'DMA_CROSS1', 'DMA_CROSS2', 'KDJ_CROSS1', 'KDJ_CROSS2', 'MACD_TR',
        'MIKE_TR', 'MIKE_BOLL', 'MIKE_WRJC', 'MIKE_WRSC', 'MIKE_WSJC',
        'MIKE_WSSC', 'MTM_CROSS1', 'MTM_CROSS2', 'MTM_CROSS3', 'MTM_CROSS4',
        'OSC_CROSS1', 'OSC_CROSS2', 'OSC_CROSS3', 'OSC_CROSS4', 'PBX_TR',
        'RSI_CROSS1', 'RSI_CROSS2', 'SKDJ_CROSS1', 'SKDJ_CROSS2', 'VPT_CROSS1',
        'VPT_CROSS2', 'VPT_CROSS3', 'VPT_CROSS4', 'WR_CROSS1', 'WR_CROSS2',
        'RNG_L_O', 'LAG_TOR_O', 'DAYS_O'
    ]
    # collect every result column whose name contains one of the keys above
    col_tar = []
    for i in range(len(cols)):
        for j in range(len(list(res.columns))):
            if list(res.columns)[j].find(cols[i]) == -1:
                continue
            col_tar.append(list(res.columns)[j])
    col_tar = list(set(col_tar))
    # run get_trans over the remaining columns per trading day and re-attach
    # the untouched signal columns
    res = res[[x for x in list(res.columns)
               if x not in col_tar]].groupby('date').apply(get_trans).join(
                   res[col_tar]).reset_index()
    res = res.assign(date_stamp=res['date'].apply(
        lambda x: QA_util_date_stamp(str(x)[0:10])))
    return res
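# Example (a sketch): pull the merged daily / weekly / alpha feature frame for a
# single sector index over a short window. The index code below is illustrative;
# the index day bars and the derived technical / alpha collections must already
# be saved locally.
#
#     features = QA_fetch_get_index_quant_data(['880301'], '2020-01-01', '2020-01-31')
#     print(features.shape)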
def QA_SU_save_stock_fianacial_percent(code=None,
                                       start_date=None,
                                       end_date=None,
                                       client=DATABASE,
                                       ui_log=None,
                                       ui_progress=None):
    if code is None:
        codes = list(QA_fetch_stock_list_adv()['code'])
    else:
        codes = QA_util_code_tolist(code)
    # default window: the last trading day up to today
    if start_date is None:
        if end_date is None:
            start_date = QA_util_get_pre_trade_date(QA_util_today_str(), 1)
            end_date = QA_util_today_str()
        else:
            start_date = '2008-01-01'
    else:
        if end_date is None:
            end_date = QA_util_today_str()
        elif end_date < start_date:
            print('end_date should be later than start_date')
    stock_financial_percent = DATABASE.stock_financial_percent
    stock_financial_percent.create_index([("code", ASCENDING),
                                          ("date_stamp", ASCENDING)],
                                         unique=True)
    err = []

    def __saving_work(code, START_DATE, END_DATE, stock_financial_percent):
        try:
            QA_util_log_info(
                '##JOB01 Pre Data stock_fianacial_percent from {START_DATE} to {END_DATE} '
                .format(START_DATE=START_DATE, END_DATE=END_DATE), ui_log)
            data = QA_fetch_get_stock_financial_percent(
                code, START_DATE, END_DATE)
            data = data.drop_duplicates(['code', 'date'])
            QA_util_log_info(
                '##JOB02 Got Data stock_fianacial_percent from {START_DATE} to {END_DATE} '
                .format(START_DATE=START_DATE, END_DATE=END_DATE), ui_log)
            if data is not None:
                stock_financial_percent.insert_many(
                    QA_util_to_json_from_pandas(data), ordered=False)
                QA_util_log_info(
                    '##JOB03 Now stock_fianacial_percent saved from {START_DATE} to {END_DATE} '
                    .format(START_DATE=START_DATE, END_DATE=END_DATE), ui_log)
            else:
                QA_util_log_info(
                    '##JOB01 No Data stock_fianacial_percent from {START_DATE} to {END_DATE} '
                    .format(START_DATE=START_DATE, END_DATE=END_DATE), ui_log)
        except Exception as error0:
            print(error0)
            err.append(str(code))

    # download and save in batches of 500 codes
    k = 500
    for i in range(0, len(codes), k):
        code = codes[i:i + k]
        QA_util_log_info('The {} of Total {}'.format((i + k), len(codes)))
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(
            str(float((i + k) / len(codes) * 100))[0:4] + '%')
        intProgressToLog = int(float((i + k) / len(codes) * 100))
        QA_util_log_info(strProgressToLog,
                         ui_log=ui_log,
                         ui_progress=ui_progress,
                         ui_progress_int_value=intProgressToLog)
        __saving_work(code, start_date, end_date, stock_financial_percent)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save stock_fianacial_percent ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_index_quant_day(code=None,
                               start_date=None,
                               end_date=None,
                               ui_log=None,
                               ui_progress=None):
    # default window: the last trading day up to today
    if start_date is None:
        if end_date is None:
            start_date = QA_util_get_pre_trade_date(QA_util_today_str(), 1)
            end_date = QA_util_today_str()
        else:
            start_date = '2008-01-01'
    else:
        if end_date is None:
            end_date = QA_util_today_str()
        elif end_date < start_date:
            print('end_date should be later than start_date')
    if code is None:
        # default universe: the '880*' sector indexes, excluding the
        # 8800 / 8807 / 8808 groups
        code = list(QA_fetch_index_list_adv()['code'])
        code = [i for i in code if i.startswith('880')]
        code = [i for i in code if not i.startswith('8800')]
        code = [i for i in code if not i.startswith('8807')]
        code = [i for i in code if not i.startswith('8808')]
    index = DATABASE.index_quant_data_index
    index.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                       unique=True)
    week = DATABASE.index_quant_data_week
    week.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                      unique=True)
    alpha = DATABASE.index_quant_data_alpha
    alpha.create_index([("code", ASCENDING), ("date_stamp", ASCENDING)],
                       unique=True)
    try:
        data1 = QA_fetch_get_index_quant_data(code, start_date, end_date)
    except:
        data1 = None
    else:
        QA_util_log_info(
            '##JOB got Data index quant data ============== from {from_} to {to_} '
            .format(from_=start_date, to_=end_date), ui_log)
    deal_date_list = QA_util_get_trade_range(start_date, end_date)
    if deal_date_list is None:
        print('not a trading day')
    elif data1 is None:
        print('not a trading day')
    else:
        for deal_date in deal_date_list:
            if QA_util_if_trade(deal_date):
                data = data1[data1['date'] == deal_date]
            else:
                data = None
            if data is not None:
                data = data.drop_duplicates(['code', 'date'])
                # split the wide feature frame into three collections:
                # alpha factors, daily technical indicators and weekly indicators
                alpha_data = data[[
                    'code', 'date', 'date_stamp', 'alpha_001', 'alpha_002',
                    'alpha_003', 'alpha_004', 'alpha_005', 'alpha_006',
                    'alpha_007', 'alpha_008', 'alpha_009', 'alpha_010',
                    'alpha_012', 'alpha_013', 'alpha_014', 'alpha_015',
                    'alpha_016', 'alpha_017', 'alpha_018', 'alpha_019',
                    'alpha_020', 'alpha_021', 'alpha_022', 'alpha_023',
                    'alpha_024', 'alpha_025', 'alpha_026', 'alpha_028',
                    'alpha_029', 'alpha_031', 'alpha_032', 'alpha_033',
                    'alpha_034', 'alpha_035', 'alpha_036', 'alpha_037',
                    'alpha_038', 'alpha_039', 'alpha_040', 'alpha_041',
                    'alpha_042', 'alpha_044', 'alpha_045', 'alpha_046',
                    'alpha_047', 'alpha_048', 'alpha_049', 'alpha_052',
                    'alpha_053', 'alpha_054', 'alpha_055', 'alpha_056',
                    'alpha_057', 'alpha_058', 'alpha_059', 'alpha_061',
                    'alpha_062', 'alpha_063', 'alpha_064', 'alpha_065',
                    'alpha_066', 'alpha_067', 'alpha_068', 'alpha_071',
                    'alpha_072', 'alpha_074', 'alpha_077', 'alpha_078',
                    'alpha_080', 'alpha_082', 'alpha_083', 'alpha_085',
                    'alpha_086', 'alpha_087', 'alpha_088', 'alpha_089',
                    'alpha_090', 'alpha_091', 'alpha_092', 'alpha_093',
                    'alpha_096', 'alpha_098', 'alpha_099', 'alpha_102',
                    'alpha_103', 'alpha_104', 'alpha_105', 'alpha_106',
                    'alpha_107', 'alpha_108', 'alpha_109', 'alpha_113',
                    'alpha_114', 'alpha_115', 'alpha_116', 'alpha_117',
                    'alpha_118', 'alpha_119', 'alpha_120', 'alpha_122',
                    'alpha_123', 'alpha_124', 'alpha_125', 'alpha_126',
                    'alpha_129', 'alpha_130', 'alpha_133', 'alpha_134',
                    'alpha_135', 'alpha_138', 'alpha_139', 'alpha_141',
                    'alpha_142', 'alpha_145', 'alpha_148', 'alpha_152',
                    'alpha_153', 'alpha_156', 'alpha_158', 'alpha_159',
                    'alpha_160', 'alpha_161', 'alpha_162', 'alpha_163',
                    'alpha_164', 'alpha_167', 'alpha_168', 'alpha_169',
                    'alpha_170', 'alpha_171', 'alpha_172', 'alpha_173',
                    'alpha_175', 'alpha_176', 'alpha_177', 'alpha_178',
                    'alpha_179', 'alpha_184', 'alpha_185', 'alpha_186',
                    'alpha_187', 'alpha_188', 'alpha_189', 'alpha_191'
                ]]
                index_data = data[[
                    'code', 'date', 'date_stamp', 'AD', 'ADDI', 'ADDI_C',
                    'ADTM', 'ADX', 'ADXR', 'ADX_C', 'AD_C', 'AMA', 'ASI',
                    'ASIT', 'ATR', 'ATRR', 'BBI', 'BIAS1', 'BIAS2', 'BIAS3',
                    'BODY', 'BODY_ABS', 'BOLL', 'CCI', 'CHO', 'DDD', 'DDI',
                    'DDI_C', 'DEA', 'DI1', 'DI2', 'DIF', 'DI_M', 'KDJ_D',
                    'KDJ_J', 'KDJ_K', 'LB', 'MA1', 'MA10', 'MA120', 'MA180',
                    'MA2', 'MA20', 'MA3', 'MA4', 'MA5', 'MA60', 'MAADTM',
                    'MACD', 'MACHO', 'MAOSC', 'MAVPT', 'MFI', 'MFI_C',
                    'MIKE_BOLL', 'MR', 'MS', 'MTM', 'MTMMA', 'OBV', 'OBV_C',
                    'OSC', 'PRICE_PCG', 'ROC', 'ROCMA', 'RSI1', 'RSI1_C',
                    'RSI2', 'RSI2_C', 'RSI3', 'RSI3_C', 'RSV', 'SHA_LOW',
                    'SHA_UP', 'SKDJ_D', 'SKDJ_K', 'SR', 'SS', 'TR', 'UB',
                    'VPT', 'VR', 'VRSI', 'VRSI_C', 'VSTD', 'WIDTH', 'WR',
                    'WR1', 'WR2', 'WS', 'CCI_CROSS4', 'DMA_CROSS1',
                    'CDLMORNINGDOJISTAR', 'CDLSEPARATINGLINES', 'WR_CROSS1',
                    'KDJ_CROSS2', 'CDLHARAMICROSS', 'CDLEVENINGSTAR',
                    'BBI_CROSS2', 'VPT_CROSS1', 'CROSS_SC', 'CDLSHORTLINE',
                    'SKDJ_CROSS1', 'CDLABANDONEDBABY', 'CDL3STARSINSOUTH',
                    'CDLUNIQUE3RIVER', 'CDLKICKINGBYLENGTH', 'CDLHOMINGPIGEON',
                    'CDLTAKURI', 'CDL3BLACKCROWS', 'CDLSTICKSANDWICH',
                    'CDLTASUKIGAP', 'VPT_CROSS2', 'CDLSHOOTINGSTAR',
                    'CDLCONCEALBABYSWALL', 'WR_CROSS2', 'ADTM_CROSS1',
                    'BIAS_CROSS2', 'MTM_CROSS4', 'CCI_CROSS3', 'CDLHAMMER',
                    'CDLMARUBOZU', 'MACD_TR', 'CDL3INSIDE',
                    'CDLUPSIDEGAP2CROWS', 'MTM_CROSS1', 'CDLGRAVESTONEDOJI',
                    'KDJ_CROSS1', 'CDLMATHOLD', 'MIKE_TR', 'CDLLADDERBOTTOM',
                    'CDLMORNINGSTAR', 'OSC_CROSS2', 'OSC_CROSS4', 'ADX_CROSS2',
                    'DI_CROSS1', 'MTM_CROSS2', 'CDLDRAGONFLYDOJI',
                    'CCI_CROSS2', 'CDLSPINNINGTOP', 'CDLHIKKAKEMOD',
                    'DMA_CROSS2', 'MIKE_WRJC', 'CROSS_JC', 'OSC_CROSS3',
                    'RSI_CROSS1', 'MIKE_WSJC', 'MTM_CROSS3', 'CDLADVANCEBLOCK',
                    'BIAS_CROSS1', 'CDLCLOSINGMARUBOZU', 'CDL3OUTSIDE',
                    'VPT_CROSS3', 'CDLEVENINGDOJISTAR', 'CDL2CROWS',
                    'CDLHANGINGMAN', 'ADTM_CROSS2', 'CDLMATCHINGLOW',
                    'CDLHIKKAKE', 'CDLKICKING', 'CDLCOUNTERATTACK',
                    'CHO_CROSS1', 'CDLHARAMI', 'BBI_CROSS1', 'MIKE_WRSC',
                    'CDLINVERTEDHAMMER', 'CCI_CROSS1', 'CDLBREAKAWAY',
                    'CDLGAPSIDESIDEWHITE', 'DI_CROSS2', 'CDL3WHITESOLDIERS',
                    'CDLTRISTAR', 'CDLXSIDEGAP3METHODS', 'CDLPIERCING',
                    'VPT_CROSS4', 'CDLLONGLINE', 'CDLDOJI', 'CDLHIGHWAVE',
                    'CDLSTALLEDPATTERN', 'ADX_CROSS1', 'CDL3LINESTRIKE',
                    'CDLBELTHOLD', 'CDLINNECK', 'CDLONNECK', 'CDLRICKSHAWMAN',
                    'CDLTHRUSTING', 'CDLIDENTICAL3CROWS', 'SKDJ_CROSS2',
                    'CDLDOJISTAR', 'RSI_CROSS2', 'OSC_CROSS1',
                    'CDLRISEFALL3METHODS', 'CDLLONGLEGGEDDOJI', 'MIKE_WSSC',
                    'CDLDARKCLOUDCOVER', 'CHO_CROSS2', 'CDLENGULFING'
                ]]
                week_data = data[[
                    'code', 'date', 'date_stamp', 'AD_WK', 'ADDI_WK',
                    'ADDI_C_WK', 'ADTM_WK', 'ADX_WK', 'ADXR_WK', 'ADX_C_WK',
                    'AD_C_WK', 'AMA_WK', 'ASI_WK', 'ASIT_WK', 'ATR_WK',
                    'ATRR_WK', 'BBI_WK', 'BIAS1_WK', 'BIAS2_WK', 'BIAS3_WK',
                    'BODY_WK', 'BODY_ABS_WK', 'BOLL_WK', 'CCI_WK', 'CHO_WK',
                    'DDD_WK', 'DDI_WK', 'DDI_C_WK', 'DEA_WK', 'DI1_WK',
                    'DI2_WK', 'DIF_WK', 'DI_M_WK', 'KDJ_D_WK', 'KDJ_J_WK',
                    'KDJ_K_WK', 'LB_WK', 'MA1_WK', 'MA10_WK', 'MA120_WK',
                    'MA180_WK', 'MA2_WK', 'MA20_WK', 'MA3_WK', 'MA4_WK',
                    'MA5_WK', 'MA60_WK', 'MAADTM_WK', 'MACD_WK', 'MACHO_WK',
                    'MAOSC_WK', 'MAVPT_WK', 'MFI_WK', 'MFI_C_WK',
                    'MIKE_BOLL_WK', 'MR_WK', 'MS_WK', 'MTM_WK', 'MTMMA_WK',
                    'OBV_WK', 'OBV_C_WK', 'OSC_WK', 'PRICE_PCG_WK', 'ROC_WK',
                    'ROCMA_WK', 'RSI1_WK', 'RSI1_C_WK', 'RSI2_WK', 'RSI2_C_WK',
                    'RSI3_WK', 'RSI3_C_WK', 'RSV_WK', 'SHA_LOW_WK',
                    'SHA_UP_WK', 'SKDJ_D_WK', 'SKDJ_K_WK', 'SR_WK', 'SS_WK',
                    'TR_WK', 'UB_WK', 'VPT_WK', 'VR_WK', 'VRSI_WK',
                    'VRSI_C_WK', 'VSTD_WK', 'WIDTH_WK', 'WR_WK', 'WR1_WK',
                    'WR2_WK', 'WS_WK', 'CDLDRAGONFLYDOJI_WK', 'MIKE_WRJC_WK',
                    'CDLRICKSHAWMAN_WK', 'MIKE_WSSC_WK', 'DI_CROSS2_WK',
                    'CDLHARAMI_WK', 'BBI_CROSS2_WK', 'VPT_CROSS2_WK',
                    'CDLBELTHOLD_WK', 'CDLHAMMER_WK', 'CDL3INSIDE_WK',
                    'CDLTRISTAR_WK', 'OSC_CROSS1_WK', 'CDLMARUBOZU_WK',
                    'CDLTASUKIGAP_WK', 'CDLSPINNINGTOP_WK',
                    'CDLDARKCLOUDCOVER_WK', 'CDL3BLACKCROWS_WK',
                    'BIAS_CROSS2_WK', 'OSC_CROSS3_WK', 'CHO_CROSS1_WK',
                    'CDLMORNINGSTAR_WK', 'ADX_CROSS2_WK', 'CDLINNECK_WK',
                    'ADTM_CROSS2_WK', 'MACD_TR_WK', 'CDLDOJI_WK',
                    'MTM_CROSS1_WK', 'CDLCOUNTERATTACK_WK', 'CDLLONGLINE_WK',
                    'KDJ_CROSS1_WK', 'CDLADVANCEBLOCK_WK', 'CDLHANGINGMAN_WK',
                    'KDJ_CROSS2_WK', 'ADX_CROSS1_WK', 'CDLMATHOLD_WK',
                    'CDLABANDONEDBABY_WK', 'WR_CROSS2_WK', 'MIKE_WRSC_WK',
                    'OSC_CROSS2_WK', 'CDLGAPSIDESIDEWHITE_WK', 'CROSS_JC_WK',
                    'MTM_CROSS4_WK', 'CDLSHOOTINGSTAR_WK', 'ADTM_CROSS1_WK',
                    'CDL3OUTSIDE_WK', 'CDLLONGLEGGEDDOJI_WK',
                    'CDL3LINESTRIKE_WK', 'CDLHIKKAKE_WK',
                    'CDLSTALLEDPATTERN_WK', 'MTM_CROSS2_WK', 'SKDJ_CROSS2_WK',
                    'CDLEVENINGDOJISTAR_WK', 'OSC_CROSS4_WK', 'CDLTAKURI_WK',
                    'CDLSHORTLINE_WK', 'CROSS_SC_WK', 'CDLMATCHINGLOW_WK',
                    'CCI_CROSS4_WK', 'MIKE_WSJC_WK', 'CDLHOMINGPIGEON_WK',
                    'VPT_CROSS1_WK', 'CDLCLOSINGMARUBOZU_WK', 'WR_CROSS1_WK',
                    'CDLTHRUSTING_WK', 'BBI_CROSS1_WK', 'DMA_CROSS2_WK',
                    'RSI_CROSS1_WK', 'CDLRISEFALL3METHODS_WK',
                    'CDLHIKKAKEMOD_WK', 'CCI_CROSS3_WK',
                    'CDLKICKINGBYLENGTH_WK', 'CDLLADDERBOTTOM_WK',
                    'DI_CROSS1_WK', 'VPT_CROSS3_WK', 'CDLHARAMICROSS_WK',
                    'CHO_CROSS2_WK', 'CCI_CROSS2_WK', 'CDL3STARSINSOUTH_WK',
                    'CDLXSIDEGAP3METHODS_WK', 'RSI_CROSS2_WK', 'MIKE_TR_WK',
                    'CDLDOJISTAR_WK', 'CDLCONCEALBABYSWALL_WK',
                    'CDLPIERCING_WK', 'CDLHIGHWAVE_WK',
                    'CDLMORNINGDOJISTAR_WK', 'CDLSTICKSANDWICH_WK',
                    'CDLGRAVESTONEDOJI_WK', 'CDLINVERTEDHAMMER_WK',
                    'CDLKICKING_WK', 'CDLSEPARATINGLINES_WK',
                    'CDLBREAKAWAY_WK', 'MTM_CROSS3_WK', 'CDLUNIQUE3RIVER_WK',
                    'CCI_CROSS1_WK', 'DMA_CROSS1_WK', 'VPT_CROSS4_WK',
                    'CDLEVENINGSTAR_WK', 'CDL2CROWS_WK',
                    'CDL3WHITESOLDIERS_WK', 'CDLIDENTICAL3CROWS_WK',
                    'CDLUPSIDEGAP2CROWS_WK', 'CDLENGULFING_WK',
                    'SKDJ_CROSS1_WK', 'BIAS_CROSS1_WK', 'CDLONNECK_WK'
                ]]
                QA_util_log_info(
                    '##JOB01 Pre Data index quant data ============== {deal_date} '
                    .format(deal_date=deal_date), ui_log)
                alpha_res = QA_util_to_json_from_pandas(alpha_data)
                index_res = QA_util_to_json_from_pandas(index_data)
                week_res = QA_util_to_json_from_pandas(week_data)
                QA_util_log_info(
                    '##JOB02 Got Data index quant data ============== {deal_date}'
                    .format(deal_date=deal_date), ui_log)
                try:
                    alpha.insert_many(alpha_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data alpha saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        alpha.insert_many(alpha_res, ordered=True)
                    elif isinstance(e, pymongo.bulk.BulkWriteError):
                        # duplicates are already guarded by the unique index
                        pass
                try:
                    week.insert_many(week_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data week saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        week.insert_many(week_res, ordered=True)
                    elif isinstance(e, pymongo.bulk.BulkWriteError):
                        pass
                try:
                    index.insert_many(index_res, ordered=False)
                    QA_util_log_info(
                        '##JOB03 Now index quant data index saved ============== {deal_date} '
                        .format(deal_date=deal_date), ui_log)
                except Exception as e:
                    if isinstance(e, MemoryError):
                        index.insert_many(index_res, ordered=True)
                    elif isinstance(e, pymongo.bulk.BulkWriteError):
                        pass
            else:
                QA_util_log_info(
                    '##JOB01 No Data index_quant_data ============== {deal_date} '
                    .format(deal_date=deal_date), ui_log)
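# Example (a sketch): refresh the three index_quant_data_* collections for the most
# recent trading day (the default window when no dates are passed).
#
#     if __name__ == '__main__':
#         QA_SU_save_index_quant_day()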