def calc(market, sql):
    """Compute volume-ratio (vr) and MA columns for every stock of *market*
    and append one row per code (its newest bar) to t_kline.

    market: 'SH', 'SZ' or 'CYB' — selects the code universe; any other value
    uses *sql* as the code-list query directly.

    NOTE(review): relies on module-level ``sql_body``/``sql_foot`` (the
    per-code kline query halves) — this looks like a stray module-level copy
    of the worker nested inside get_pro_kline; confirm it is still called.
    """
    if market == 'SH':
        sql = "select code from v_stockcode where code like '6%%' order by code "
    if market == 'SZ':
        sql = "select code from v_stockcode where code like '0%%' order by code "
    if market == 'CYB':
        sql = "select code from v_stockcode where code like '3%%' order by code "
    for row in hq._excutesql(sql):
        # per-code kline query; use a fresh name instead of clobbering `sql`
        code_sql = sql_body + row['code'] + sql_foot
        df = pd.DataFrame(hq._excutesql(code_sql).fetchall())
        if df.empty:
            print(row['code'] + ' is empty')
            continue
        df.columns = ['datetime', 'code', 'close', 'vol']
        df['vol5'] = tsu.MA(df['vol'], 5)
        # rows come back newest-first, so shift(-5) looks at older bars
        df['mean'] = df['vol5'].shift(-5)
        df['vr'] = df['vol'] / df['mean']
        df['vr'] = df['vr'].map(c.FORMAT)
        df['vr'] = df['vr'].astype(float)
        for a in c.MA:
            if isinstance(a, int):
                df['ma%s' % a] = tsu.MA(df['close'], a).map(
                    c.FORMAT).shift(-(a - 1))
        df1 = df.drop(['close', 'vol', 'vol5', 'mean'], axis=1)
        # .ix was removed in pandas 1.0; on the default RangeIndex the
        # label-based .ix[:0] selected exactly the first (newest) row,
        # which .iloc[:1] reproduces.
        df1.iloc[:1].to_sql('t_kline', c.ENGINE, if_exists='append')
def importcsv_excle(date):
    """Import every configured CSV/Excel export for *date* into its DB table.

    Configuration rows come from t_importfuction; each row names the source
    file path expression, target table, column list and optional per-column
    formatters. Existing rows for *date* are deleted before appending.
    """
    sql = "select function_name,file_path,sql_table_name,columns_name,format_columns,csv_excel from t_importfuction"
    for fn, fp, sn, cn, fc, isexcel in hq._excutesql(sql):
        # file_path stores '!'-separated Python expressions; eval() resolves
        # them to concrete paths. NOTE(review): eval on DB-stored text is
        # unsafe if the table can be written by untrusted parties — confirm.
        csv_path = eval(fp.split('!')[0])
        if (os.path.exists(csv_path)):
            print(fn)
            sql_d = "delete from " + sn + " where date = '" + date + "'"
            hq._excutesql(sql_d)
            if (isexcel == 'y'):
                excel_path = eval(fp.split('!')[1])
                pd.read_csv(csv_path, encoding='gb18030').to_excel(
                    excel_path, sheet_name=DATE)  # CSV -> Excel round-trip
                df = pd.read_excel(excel_path, sheet_name=DATE)
                os.remove(excel_path)
            else:
                df = pd.read_csv(csv_path, encoding='gb18030')
            # promote the index to a column, drop the trailing junk column
            df.reset_index(level=0, inplace=True)
            df.drop(df.columns[len(df.columns) - 1], axis=1, inplace=True)
            if (fn == 'importentrust'):
                df.drop(df.columns[0], axis=1, inplace=True)
            #print(df)
            # columns_name stores a Python list literal of target column names
            df.columns = eval(cn)
            if len(fc) > 0:  # normalize data formats per configured column
                # each '!' segment is "<column>#<formatter expression>"
                for i in range(len(fc.split('!'))):
                    df[fc.split('!')[i].split('#')[0]] = df[fc.split(
                        '!')[i].split('#')[0]].map(
                            eval(fc.split('!')[i].split('#')[1]))
            df.to_sql(sn, c.ENGINE, if_exists='append')
            os.remove(csv_path)
        else:
            print("No file " + csv_path)
def get_dbl(date):
    """Screen for MACD bottom-divergence candidates as of *date*.

    Universe: codes in a long downtrend (MA10<MA30<MA60<MA120) excluding
    ts_code >= '680000'. For each code the last 120 daily bars are loaded,
    MACD(12,26,9) computed, and the code is kept when diff approaches dea
    from below (diff/dea in (0.95, 0.99)) while diff and dea rise for two
    consecutive hits and the lows keep falling. Returns the matching codes.
    """
    PRO_DBL = "SELECT a.ts_code FROM t_pro_daily a where a.ma10 <a.ma30 and a.ma30 < a.ma60 and a.ma60 < a.ma120 and a.trade_date = '" + date + "' and a.ts_code < '680000' order by 1"
    result = []  # renamed from `list`, which shadowed the builtin
    for code in hq._excutesql(PRO_DBL).fetchall():
        df = pd.DataFrame(hq._excutesql(
            "select trade_date,high,low,close from t_pro_daily where ts_code = '"
            + code[0] + "' and trade_date <= '" + date
            + "' order by trade_date desc LIMIT 120").fetchall())
        df.columns = ['date', 'high', 'low', 'close']
        df['code'] = code[0]
        # reverse to chronological order so MACD sees oldest bars first
        df = df.reindex(index=df.index[::-1])
        df['diff'], df['dea'], df['macd'] = MACD(df['close'].values,
                                                 fastperiod=12,
                                                 slowperiod=26,
                                                 signalperiod=9)
        df['junction'] = df['diff'] / df['dea']
        df['diff_v'] = df['diff'] - df['diff'].shift(1)
        # .copy() silences SettingWithCopy warnings on the filtered frame
        df_tmp = df[(df['junction'] < 0.99) & (df['junction'] > 0.95)
                    & (df['diff_v'] > 0)].copy()
        df_tmp['diff_v1'] = df_tmp['diff'] - df_tmp['diff'].shift(1)
        df_tmp['dea_v1'] = df_tmp['dea'] - df_tmp['dea'].shift(1)
        df_tmp['low_v1'] = df_tmp['low'] - df_tmp['low'].shift(1)
        df_tmp['diff_v2'] = df_tmp['diff'].shift(1) - df_tmp['diff'].shift(2)
        df_tmp['dea_v2'] = df_tmp['dea'].shift(1) - df_tmp['dea'].shift(2)
        df_tmp['low_v2'] = df_tmp['low'].shift(1) - df_tmp['low'].shift(2)
        # (the original's duplicated low_v1/low_v2 assignments and repeated
        # `< 0` filter terms were removed — the selection is unchanged)
        df_final = df_tmp[(df_tmp['diff_v1'] > 0) & (df_tmp['dea_v1'] > 0)
                          & (df_tmp['low_v1'] < 0) & (df_tmp['diff_v2'] > 0)
                          & (df_tmp['dea_v2'] > 0) & (df_tmp['low_v2'] < 0)]
        if len(df_final) > 0:
            result.append(code[0])
    print(result)
    return result
def toDB_pro_stocklist():
    """Rebuild t_pro_stocklist: wipe the table, then reload the full stock
    basics list from the pro API."""
    print("toDB_pro_stocklist start " + str(datetime.datetime.now()))
    hq._excutesql('delete from t_pro_stocklist')
    basics = pro.stock_basic(
        exchange_id='',
        fields='ts_code,symbol,name,list_date,delist_date,list_status')
    basics.to_sql('t_pro_stocklist', c.ENGINE, if_exists='append')
    print("toDB_pro_stocklist end " + str(datetime.datetime.now()))
def toDB_pro_dividend():
    """Rebuild t_pro_dividend: wipe the table, then fetch and append the
    dividend history of every code in c.PRO_STOCK_LIST."""
    print("toDB_pro_dividend start " + str(datetime.datetime.now()))
    hq._excutesql('delete from t_pro_dividend')
    for row in hq._excutesql(c.PRO_STOCK_LIST):
        df = pro.dividend(ts_code=row[0])
        # skip codes the API returns nothing for
        # (replaces the original `if df is None: pass / else:` anti-idiom)
        if df is not None:
            df['ts_code'] = df['ts_code'].map(c.PRO_CODE_FORMAT)
            df.to_sql('t_pro_dividend', c.ENGINE, if_exists='append')
    print("toDB_pro_dividend end " + str(datetime.datetime.now()))
def get_pro_kline(date):
    """Rebuild t_kline (vr + MA columns per code as of *date*) and copy the
    results back into t_pro_daily.

    Spawns one worker thread per market segment (SH/SZ/CYB); each worker
    recomputes the volume ratio and moving averages from that code's latest
    120 daily bars and appends the newest row to t_kline, then a single
    UPDATE joins t_kline back onto t_pro_daily.
    """
    start = datetime.datetime.now()
    print(start)
    hq._excutesql("delete from t_kline")
    sql_body = "SELECT trade_date,ts_code,close,vol FROM t_pro_daily WHERE ts_code = '"
    sql_foot = "' and trade_date <= '" + date + "' order by trade_date desc limit 120"
    sql_u = "update t_pro_daily a,t_kline b set a.vr=b.vr,a.ma5=b.ma5,a.ma10=b.ma10,a.ma20=b.ma20,a.ma30=b.ma30,a.ma60=b.ma60,a.ma120=b.ma120 where a.trade_date =b.datetime and a.ts_code =b.code"

    def calc(market, sql):
        # pick the code universe for this market segment
        if market == 'SH':
            sql = "select code from v_stockcode where code like '6%%' order by code "
        if market == 'SZ':
            sql = "select code from v_stockcode where code like '0%%' order by code "
        if market == 'CYB':
            sql = "select code from v_stockcode where code like '3%%' order by code "
        for row in hq._excutesql(sql):
            # per-code kline query; fresh name instead of clobbering `sql`
            code_sql = sql_body + row['code'] + sql_foot
            df = pd.DataFrame(hq._excutesql(code_sql).fetchall())
            if df.empty:
                print(row['code'] + ' is empty')
                continue
            df.columns = ['datetime', 'code', 'close', 'vol']
            df['vol5'] = tsu.MA(df['vol'], 5)
            # rows are newest-first, so shift(-5) looks at older bars
            df['mean'] = df['vol5'].shift(-5)
            df['vr'] = df['vol'] / df['mean']
            df['vr'] = df['vr'].map(c.FORMAT)
            df['vr'] = df['vr'].astype(float)
            for a in c.MA:
                if isinstance(a, int):
                    df['ma%s' % a] = tsu.MA(df['close'], a).map(
                        c.FORMAT).shift(-(a - 1))
            df1 = df.drop(['close', 'vol', 'vol5', 'mean'], axis=1)
            # .ix was removed in pandas 1.0; on the default RangeIndex the
            # label-based .ix[:0] selected exactly the first (newest) row,
            # which .iloc[:1] reproduces.
            df1.iloc[:1].to_sql('t_kline', c.ENGINE, if_exists='append')

    try:
        t_sh = threading.Thread(target=calc, args=("SH", "sql"))
        t_sz = threading.Thread(target=calc, args=("SZ", "sql"))
        t_cyb = threading.Thread(target=calc, args=("CYB", "sql"))
        for t in [t_sh, t_sz, t_cyb]:
            t.start()
        for t in [t_sh, t_sz, t_cyb]:
            t.join()
    except Exception as e:
        print(e)
    print("############### UPDATE STARTING ###############")
    hq._excutesql(sql_u)
    end = datetime.datetime.now()
    print("get_pro_kline: " + str(end - start))
def lookup(remark):
    """Back-test recorded picks: for each (date, code) in t_record, find the
    post-record close-price peak and write the outcome into t_lookup.

    flag=1: the peak came 2+ trade days after the record date (a hit, gain
    measured to the peak). flag=0: otherwise — gain measured to the latest
    close. *remark* is stored on every output row.
    """
    df = pd.DataFrame(hq._excutesql(c.SQL_RECORD).fetchall())
    df.columns = ['date', 'code', 'type', 'remark']
    dfdate = pd.DataFrame(hq._excutesql(c.SQL_TRADEDAY).fetchall())
    count = 0
    count_e = 0
    rows = []     # renamed from `list`, which shadowed the builtin
    records = []  # renamed from `list1`
    for i in range(0, len(df)):
        # renamed from `str`, which shadowed the builtin
        query = ("select date,code,high,low,close from t_hisdata_all where code ='"
                 + df['code'][i] + "' and date >'" + df['date'][i]
                 + "' order by date,code")
        dfs = pd.DataFrame(hq._excutesql(query).fetchall())
        if dfs.empty:
            continue
        dfs.columns = ['date', 'code', 'high', 'low', 'close']
        # renamed from `max`, which shadowed the builtin
        peak = dfs.groupby(['code'])['close'].max()[0]
        tmp = dfs['date'][np.where(dfs['close'] == peak)[0]].to_string()
        date_max = tmp[len(tmp) - 10:len(tmp)]  # last 10 chars = the date
        if int(hq.get_days(df['date'][i], date_max, dfdate)) > 1:
            # the peak must be at least T+2 after the record date to count
            count = count + 1
            rows.append([
                df['date'][i], date_max, df['code'][i], dfs['close'][0], peak,
                (peak - dfs['close'][0]) / dfs['close'][0] * 100, df['type'][i],
                df['remark'][i], 1
            ])
        else:
            count_e = count_e + 1
            rows.append([
                df['date'][i], date_max, df['code'][i], dfs['close'][0],
                dfs['close'][len(dfs) - 1],
                (dfs['close'][len(dfs) - 1] - dfs['close'][0]) /
                dfs['close'][0] * 100, df['type'][i], df['remark'][i], 0
            ])
    # summary row (3 fields) is printed but filtered out of the DB insert
    rows.append([count, (count + count_e), count / (count + count_e) * 100])
    for j in rows:
        if len(j) == 9:
            records.append(j)
    print(rows)
    hq._excutesql("delete from t_lookup")
    df1 = pd.DataFrame(records)
    df1.columns = [
        'fdate', 'sdate', 'code', 'bprice', 'sprice', 'aoi', 'type',
        'parameter', 'flag'
    ]
    df1.insert(9, 'remark', remark)
    df1.to_sql("t_lookup", c.ENGINE, if_exists='append')
def get_hopeDOJI():
    """Find codes whose maximum-volume DOJI bar is today (c.DATE), closed up,
    and whose max volume exceeds the rest of the window by factor c.DOJI.

    Returns the matching codes, or the string "NULL" when the screen query
    yields no rows (kept for caller compatibility).
    """
    start = datetime.datetime.now()
    df = pd.DataFrame(hq._excutesql(c.SQL_DOJI).fetchall())
    codes = []  # renamed from `list`, which shadowed the builtin
    if df.empty:
        return "NULL"
    df.columns = ['date', 'code', 'volume', 'p_change']
    df_max = df.groupby(['code'])['volume'].max()
    df_sum = df.groupby(['code'])['volume'].sum()
    for code in df_max.index:
        # first row where this code hits its max volume
        i = np.where((df['code'] == code)
                     & (df['volume'] == df_max[code]))[0][0]
        if df['date'][i] == c.DATE:
            if df['volume'][i] == df_max[code] and df['p_change'][i] > 0:
                # max volume must dominate the sum of the remaining bars
                if df_max[code] / (df_sum[code] - df_max[code]) > c.DOJI:
                    codes.append(code)
    end = datetime.datetime.now()
    print("running: " + str(end - start))
    return codes
def get_rise(period):
    """Screen for codes in a steady rise on monthly ('M') or weekly bars.

    A code qualifies when its max-high and max-low fall on the same (latest)
    bar and its high rose more than 0.6% on more than `com` of the last
    `cir` bars. Returns the matching codes.
    """
    start = datetime.datetime.now()
    codes = []  # renamed from `list`, which shadowed the builtin
    if period == "M":
        sql = c.SQL_MONTH
        cir = 3
        com = 2
    else:
        sql = c.SQL_WEEK
        cir = 4
        com = 3
    print(sql)
    df = pd.DataFrame(hq._excutesql(sql).fetchall())
    df.columns = ['date', 'code', 'close', 'high', 'low', 'vol']
    df_high_max = df.groupby(['code'])['high'].max()
    df_low_max = df.groupby(['code'])['low'].max()
    # NOTE(review): [0] takes the max date of the FIRST group only — correct
    # only if every code shares the same latest bar date; confirm.
    df_date_max = df.groupby(['code'])['date'].max()[0]
    for code in df_high_max.index:
        count = 0
        i = np.where((df['code'] == code)
                     & (df['high'] == df_high_max[code]))[0][0]
        j = np.where((df['code'] == code)
                     & (df['low'] == df_low_max[code]))[0][0]
        if (i == j and df['date'][i] == df_date_max
                and len(df[df['code'] == code]) > 1):
            for _ in range(cir):
                # bar-over-bar high gain above 0.6%
                if df['high'][i] / df['high'][i - 1] > 1.006:
                    count = count + 1
                i = i - 1
            if count > com:
                codes.append(code)
    end = datetime.datetime.now()
    print("running: " + str(end - start))
    return codes
def get_shrink(d):
    """Find codes whose minimum volume in the screen window falls on the last
    trade day before *d* (volume shrink). Returns the matching codes."""
    start = datetime.datetime.now()
    df = pd.DataFrame(hq._excutesql(c.SQL_VSHRINK).fetchall())
    print(c.SQL_VSHRINK)
    codes = []  # renamed from `list`, which shadowed the builtin
    if df.empty:
        print('empty')
        return codes
    df.columns = ['date', 'code', 'volume', 'p_change']
    df_min = df.groupby(['code'])['volume'].min()
    for code in df_min.index:
        # first row where this code hits its minimum volume
        i = np.where((df['code'] == code)
                     & (df['volume'] == df_min[code]))[0][0]
        if (df['volume'][i] == df_min[code]
                and df['date'][i] == hq.get_lasttradeday(d)):
            codes.append(df['code'][i])
    end = datetime.datetime.now()
    print("running: " + str(end - start))
    return codes
def importrisk(date):
    """Load the option-Greeks export C:\\risk.csv into t_opt_risk for *date*
    (replacing any existing rows for that date), then delete the file."""
    csv_path = u'C:\\risk.csv'
    hq._excutesql("delete from t_opt_risk where date = '" + date + "'")
    df = pd.read_csv(csv_path, encoding='gb18030')
    # drop the trailing junk column, promote the index to a real column
    df.drop(df.columns[len(df.columns) - 1], axis=1, inplace=True)
    df.reset_index(level=0, inplace=True)
    df['index'] = df['index'].map(FORMAT_date)
    df.columns = [
        'date', 'ts_code', 'opt_code', 'name', 'Delta', 'Theta', 'Gamma',
        'Vega', 'Rho'
    ]
    # strip padding spaces from the text columns
    for col in ('date', 'opt_code', 'name'):
        df[col] = df[col].map(FORMAT_SPACE)
    df.to_sql('t_opt_risk', c.ENGINE, if_exists='append')
    os.remove(csv_path)
def get_fb(date):
    """Screen for pullback candidates as of *date*.

    Universe: MA60 > MA120 and the base-date candle closed below its open.
    Pattern over the last 4 daily bars (index 0 = newest):
      - the base date (index 0) has the smallest volume of the last 3 bars,
      - index 2 has the largest volume of those 3,
      - index 3 closed up, indexes 1 and 2 closed down.
    Returns the matching ts_codes.
    """
    PRO_FB = "select a.ts_code from t_pro_daily a where a.ma60 > a.ma120 and a.trade_date = '" + date.replace('-', '') + "' and a.close<a.open"
    result = []  # renamed from `list`, which shadowed the builtin
    for code in hq._excutesql(PRO_FB).fetchall():
        print(code)
        try:
            df = pd.DataFrame(hq._excutesql(
                "select ts_code,trade_date,open,close,pre_close,amount,high from t_pro_daily where ts_code = '"
                + code[0] + "' and trade_date <= '" + date.replace('-', '')
                + "' order by trade_date desc limit 4").fetchall())
            df.columns = ['code', 'date', 'open', 'close', 'pre_close', 'vol',
                          'high']
            df_tmp = df.drop(labels=3)  # the three most recent (down) bars
            df_min_vol = df_tmp.groupby(['code'])['vol'].min()
            df_max_vol = df_tmp.groupby(['code'])['vol'].max()
            try:
                # flattened from five nested ifs; short-circuit `and`
                # evaluates the same conditions in the same order
                if (df[df['vol'] == df_min_vol[0]]['date'][0]
                        == date.replace('-', '')
                        and df['vol'][2] == df_max_vol[0]
                        and df['open'][3] <= df['close'][3]
                        and df['open'][2] > df['close'][2]
                        and df['open'][1] > df['close'][1]):
                    result.append(code[0])
            except Exception:
                # best effort: short/malformed history for this code — skip
                continue
        except Exception:
            continue
    print(result)
    return result
def check_record(start, end):
    """Render a tor/vr chart (via tor_vr) for every code matched by the
    hard-coded screen query, timing the whole run."""
    began = datetime.datetime.now()
    sql = "select code,'' from t_daydata where datetime = '2018-07-04' and vr > 1 order by code "
    for row in hq._excutesql(sql):
        tor_vr(start, end, row[0], row[1])
    finished = datetime.datetime.now()
    print("running :" + str(finished - began))
def get_aoi(type, **kwargs):
    """Screen codes by the percentage rise from their window-low to their
    window-high-of-lows.

    type == 1: keep codes whose rise exceeds kwargs['top'].
    otherwise: keep codes whose rise lies strictly between the other kwarg
    (the lower bound) and kwargs['top'].
    The low must have occurred more than 0.618 * c.DAYS trade days ago and
    the high-of-lows must be today (c.DATE). Returns the matching codes.

    NOTE(review): assumes the kwarg values are numeric — the original built
    the comparison as a string and eval()'d it, which behaves the same for
    numeric values; confirm no caller passes strings.
    """
    start = datetime.datetime.now()
    df = pd.DataFrame(hq._excutesql(c.SQL_GRADIENT).fetchall())
    result = []  # renamed from `list`, which shadowed the builtin
    if df.empty:
        print('empty')
        return result
    df.columns = ['date', 'code', 'high', 'low']
    df_CURRENT = pd.DataFrame(hq._excutesql(c.SQL_TRADEDAY).fetchall())
    df_code = df['code'].drop_duplicates()
    for key, value in kwargs.items():
        if key == 'top':
            TOP = value
        else:
            LOW = value
    for code in df_code:
        df_tmp = df[df['code'] == code]
        # renamed from `min`/`max`, which shadowed the builtins
        low_min = df_tmp.groupby(['code'])['low'].min()
        low_max = df_tmp.groupby(['code'])['low'].max()
        rise_pct = (low_max[0] - low_min[0]) / low_min[0] * 100
        # direct comparison replaces the original eval()-built condition
        if type == 1:
            hit = rise_pct > TOP
        else:
            hit = LOW < rise_pct < TOP
        if hit:
            # extract the dates of the extreme rows from their repr
            str_max = str(df_tmp[df_tmp['low'] == low_max[0]]['date'])
            str_min = str(df_tmp[df_tmp['low'] == low_min[0]]['date'])
            date_max = str_max[len(str_max) - 36:len(str_max) - 26]
            date_min = str_min[len(str_min) - 36:len(str_min) - 26]
            int_datemin = np.where((df_CURRENT[2] == date_min))
            int_datecur = np.where((df_CURRENT[2] == c.DATE))
            # the low must be old enough (golden-ratio fraction of c.DAYS)
            if ((int_datecur[0][0] - int_datemin[0][0])
                    > (float(c.DAYS) * 0.618)):
                if date_max == c.DATE:
                    result.append(code)
    end = datetime.datetime.now()
    print("running: " + str(end - start))
    return result
def importentrust(date):
    """Import the day's entrust (order) CSV export into t_opt_entrust.

    Round-trips the CSV through an Excel file (C:\\<DATE>.xlsx), loads it,
    renames the columns, appends to the DB, then deletes both temp files.

    NOTE(review): the delete targets t_entrust but the insert goes to
    t_opt_entrust — table mismatch; confirm which table is intended.
    """
    csv_path = u'C:\\' + DATE + '.csv'
    excel_path = u'C:\\' + DATE + '.xlsx'
    sql = "delete from t_entrust where date = '" + date + "'"
    hq._excutesql(sql)
    #df = pd.read_csv(excel_path, index_col=False, quoting=3, sep=" ",encoding='gb18030')
    #csv_to_excle()
    pd.read_csv(csv_path, encoding='gb18030').to_excel(excel_path,
                                                       sheet_name=DATE)  # CSV -> Excel
    df = pd.read_excel(excel_path, sheet_name=DATE)
    # drop the trailing junk column and the sequence-number column
    df.drop(df.columns[len(df.columns) - 1], axis=1, inplace=True)
    df.drop(['序号'], axis=1, inplace=True)
    print(df)
    df.columns = [
        'date', 'time', 'market', 'code', 'name', 'deal', 'type', 'price',
        'quantity', 'amount', 'reserve_flag', 'serial', 'remark', 'fee'
    ]
    df['date'] = df['date'].map(FORMAT_date)
    df.to_sql('t_opt_entrust', c.ENGINE, if_exists='append')
    os.remove(excel_path)
    os.remove(csv_path)
def toDB_pro_conceptDetail():
    """Fetch concept-constituent details for every concept code and append
    them to t_pro_concept_detail (throttled between API calls)."""
    for concept_id, name in hq._excutesql(
            "select code,name from t_pro_concept order by code"):
        # NOTE(review): concept_id is already a scalar string here, so
        # concept_id[0] passes only its first character to the API — this
        # looks like a bug carried over from tuple-row code; behavior kept,
        # confirm against the API's expected `id` format.
        df = pro.concept_detail(id=concept_id[0],
                                fields='ts_code,name,in_date,out_date')
        # skip concepts the API returns nothing for
        # (replaces the original `if df is None: pass / else:` anti-idiom;
        # `concept_id` renamed from `id`, which shadowed the builtin)
        if df is not None:
            df['ts_code'] = df['ts_code'].map(c.PRO_CODE_FORMAT)
            hq.Add_col(df, id=concept_id, concept=name)
            time.sleep(10)  # throttle API calls
            df.to_sql('t_pro_concept_detail', c.ENGINE, if_exists='append')
    print("toDB_pro_conceptDetail end " + str(datetime.datetime.now()))
def tor_vr(start, end, code, para):
    """Plot close/tor/vr/vol series for *code* over [start, end] and save the
    figure as C:/image/<DATE>/<code>.jpg. *para* is shown in the title."""
    sql = "select datetime,close,tor,vr,vol from t_daydata where datetime between '" + start + "' and '" + end + "' and code = '" + code + "'"
    title = code + " [" + start + "," + end + " ] " + str(para)
    # `dates` renamed from `datetime`, which shadowed the datetime module
    dates = []
    closes = []
    tors = []
    vrs = []
    vols = []
    for row in hq._excutesql(sql):
        dates.append(row[0])
        closes.append(row[1])
        tors.append(row[2])
        vrs.append(row[3])
        vols.append(row[4] / 100000)  # scale volume down for plotting
    fig, ax = plt.subplots()
    plt.figure(1, figsize=(150, 130))
    plt.subplot(212)
    ax.xaxis_date()  # treat the X axis as dates
    plt.title(title)
    plt.xticks()
    plt.yticks()
    plt.ylabel("")
    plt.plot(dates, tors, color='yellow')
    plt.plot(dates, vrs, color='red')
    plt.xlabel("YELLOW tor,RED vr")
    plt.grid()
    plt.subplot(221)
    plt.plot(dates, vols, color='blue')
    plt.xlabel("BLUE vol")
    plt.grid()
    plt.subplot(222)
    plt.plot(dates, closes, color='black')
    plt.xlabel("BLACK close")
    plt.grid()
    fig.set_size_inches(15, 10)
    path = "C:/image/" + c.DATE.replace("-", "") + "/"
    # makedirs(exist_ok=True) also creates C:/image if missing and avoids
    # the exists()/mkdir() race of the original
    os.makedirs(path, exist_ok=True)
    plt.savefig(path + title[:6] + ".jpg")
    plt.close()
def get_soared(d, uod):
    """Screen for volume-spike candidates: 'u' = soared up (short upper
    shadow), anything else = fallen (long lower shadow).

    Returns the matching codes ('u') or [date, code] pairs (down branch).
    The original computed this list but left `return list` commented out,
    always returning None — the return is restored here.
    """
    start = datetime.datetime.now()
    if uod == 'u':
        sql = c.SQL_SOARED
    else:
        sql = c.SQL_FALL
    df = pd.DataFrame(hq._excutesql(sql).fetchall())
    print(sql)
    result = []  # renamed from `list`, which shadowed the builtin
    if df.empty:
        return result
    df.columns = [
        'date', 'code', 'volume', 'turnover', 'open', 'close', 'high', 'low'
    ]
    df_max = df.groupby(['code'])['volume'].max()
    for code in df_max.index:
        # first row where this code hits its maximum volume
        i = np.where((df['code'] == code)
                     & (df['volume'] == df_max[code]))[0][0]
        high = df['high'][i]
        open_px = df['open'][i]   # renamed from `open` (builtin shadow)
        close_px = df['close'][i]
        low = df['low'][i]
        if uod == 'u':
            # candle body top; upper shadow must be short relative to close
            body_top = open_px if open_px > close_px else close_px
            if (df['volume'][i] == df_max[code]
                    and (high - body_top) / close_px < c.UPSD_RATIO):
                if df['date'][i] == hq.get_lasttradeday(d):
                    result.append(df['code'][i])
        else:
            # candle body bottom; lower shadow must be long
            body_bot = open_px if open_px < close_px else close_px
            # BUG FIX: the original compared against the builtin `max`
            # function (always False), so the down screen never matched.
            if (df['volume'][i] == df_max[code]
                    and (body_bot - low) / close_px > c.DOWNSD_RATIO):
                if df['date'][i] == hq.get_lasttradeday(d):
                    result.append([df['date'][i], df['code'][i]])
    end = datetime.datetime.now()
    print("running: " + str(end - start))
    return result
def toDB_pro_common():
    """Refresh every enabled 'usual' pro-API table listed in t_pro_functionmap.

    Per mapping: clear stale rows (by trade_date when isdate='Y', otherwise
    the whole table), fetch via pro.query (type 0) or an eval'd call (type 1),
    normalize the code column, and append. flag bookkeeping: 0 = in-flight,
    3 = failed; both are reset to 1 in the per-iteration finally block.
    """
    start = datetime.datetime.now()
    for fn, tn, t, i, para in hq._excutesql(
            "select fname,tname,type,isdate,parameter from t_pro_functionmap where flag = 1 and isusual = 'Y'"
    ):
        print("###" + fn + "###" + tn + "###" + "###")
        try:
            if i == 'Y':
                sql_del = ("delete from " + tn + " where trade_date = '"
                           + c.DATE.replace('-', '') + "'")
            else:
                sql_del = "delete from " + tn
            hq._excutesql(sql_del)
            sql_update = "update t_pro_functionmap set flag = 0 where tname ='" + tn + "'"
            hq._excutesql(sql_update)
            if t == 0:
                df = pro.query(fn, trade_date=c.DATE.replace('-', ''))
            if t == 1:
                # NOTE(review): eval of DB-stored call text — safe only if
                # t_pro_functionmap is trusted config; confirm.
                fun = "pro." + fn + '(' + para + ')'
                df = eval(fun)
            try:
                if fn == 'index_weight':
                    df['con_code'] = df['con_code'].map(c.PRO_CODE_FORMAT)
                else:
                    df['ts_code'] = df['ts_code'].map(c.PRO_CODE_FORMAT)
            finally:
                df.to_sql(tn, c.ENGINE, if_exists='append')
        except Exception as e:
            # was a bare `except:` — log the failure cause before marking
            print(e)
            sql = "update t_pro_functionmap set flag = 3 where tname ='" + tn + "'"
            print(sql)
            hq._excutesql(sql)
            continue
        finally:
            # runs every iteration (even after `continue`), re-enabling rows
            sql = "update t_pro_functionmap set flag = 1 where flag = 0 or flag = 3"
            hq._excutesql(sql)
    end = datetime.datetime.now()
    print("get_pro_com: " + str(end - start))
def get_platform():
    """Screen for 'platform' consolidation setups.

    NOTE(review): the visible body only splits today's codes into 'flying'
    (low above MA5) vs 'landed' (low at/below MA5) groups and computes each
    group's max highs — `list` is created but never filled, nothing is
    returned after the empty-case guard, and the two max series are unused.
    The function appears unfinished; confirm intent before relying on it.
    """
    start = datetime.datetime.now()
    df = pd.DataFrame(hq._excutesql(c.SQL_PLATFORM).fetchall())
    print(c.SQL_PLATFORM)
    list = []
    if (df.empty):
        print('empty')
        return list
    else:
        # first decide whether the code entered a platform phase;
        # columns: date,code,close,high,low,ma5,ma10,ma20,p_change,volume,turnover
        df.columns = [
            'date', 'code', 'close', 'high', 'low', 'ma5', 'ma10', 'ma20',
            'p_change', 'volume', 'turnover'
        ]
        df_fly = df[(df['date'] == c.DATE)
                    & (df['low'] > df['ma5'])]  # still riding above MA5
        df_plat = df[(df['date'] == c.DATE)
                     & ((df['low'] < df['ma5'])
                        | (df['low'] == df['ma5']))]  # already landed on MA5
        max_fly_high = df_fly.groupby(['code'])['high'].max()
        max_plat_high = df_plat.groupby(['code'])['high'].max()
def cal(list, date):
    """For each ts_code in *list*, locate its max-high and min-low bars since
    *date*, compute gain/loss percentages and holding-day spans relative to
    the base-date close, and write the report to E:\\huice\\<date>.xlsx.

    (The parameter name `list` shadows the builtin but is kept for caller
    compatibility.)
    """
    sql = "select ts_code,trade_date,`close`,high,low from t_pro_daily where ts_code in (" + str(
        list).replace('[', '').replace(
            ']', '') + ") and trade_date >= '" + date + "'"
    df = pd.DataFrame(hq._excutesql(sql).fetchall())
    df.columns = ['code', 'date', 'close', 'high', 'low']
    df_max = df.groupby(['code'])['high'].max()
    df_min = df.groupby(['code'])['low'].min()
    # DataFrame.append was removed in pandas 2.0 — accumulate plain row
    # dicts and build each frame once instead of appending per row.
    max_rows = []
    for code in df_max.index:
        # first row where this code hits its maximum high
        i = np.where((df['code'] == code) & (df['high'] == df_max[code]))[0][0]
        max_rows.append({
            'code': code,
            'date': df.iloc[i]['date'],
            'close': df.iloc[i]['close'],
            'flag': 'max'
        })
    min_rows = []
    for code in df_min.index:
        # first row where this code hits its minimum low
        i = np.where((df['code'] == code) & (df['low'] == df_min[code]))[0][0]
        min_rows.append({
            'code': code,
            'date': df.iloc[i]['date'],
            'close': df.iloc[i]['close'],
            'flag': 'min'
        })
    max_pd = pd.DataFrame(max_rows, columns=['code', 'date', 'close', 'flag'])
    min_pd = pd.DataFrame(min_rows, columns=['code', 'date', 'close', 'flag'])
    # base rows = the entry bar on *date*; merge peak/trough/base per code
    df_base = df[df['date'] == date].drop(['high', 'low'], axis=1)
    df_base.insert(3, 'flag', 'base')
    df_tmp = pd.merge(max_pd, min_pd, on=['code'])
    df_all = pd.merge(df_tmp, df_base, on=['code'])
    # close_x = peak close, close_y = trough close, close = base close
    df_all['收益率'] = (df_all['close_x'] / df_all['close'] -
                     1).map(lambda x: round(x, 2)) * 100
    df_all['最大收益天数'] = df_all['date_x'].astype(int) - df_all['date'].astype(
        int)
    df_all['亏损率'] = (df_all['close_y'] / df_all['close'] -
                     1).map(lambda x: round(x, 2)) * 100
    df_all['最大亏损天数'] = df_all['date_y'].astype(int) - df_all['date'].astype(
        int)
    df_all['date'] = pd.to_datetime(df_all['date'])
    df_all['date_x'] = pd.to_datetime(df_all['date_x'])
    df_all['date_y'] = pd.to_datetime(df_all['date_y'])
    filename = r"E:\huice\\" + date + ".xlsx"
    df_all.to_excel(filename, sheet_name='passengers', index=False)
def toDB_pro_funcitonname(fname, tname, type, comment):
    """Register one pro-API function/table mapping in t_pro_functionmap.

    NOTE(review): the statement is built by string concatenation and is
    injection-prone if any argument comes from untrusted input; confirm all
    callers pass trusted values (or switch to parameterized execution if
    hq._excutesql supports it).
    """
    insert_sql = ("INSERT INTO t_pro_functionmap(fname,tname,type,comment) values('"
                  + fname + "','" + tname + "'," + str(type) + ",'" + comment
                  + "')")
    hq._excutesql(insert_sql)