def start_service():
    """Open a Wind API session (if needed) and run the Flask app.

    The Wind session is always closed when the web server stops, even if
    ``app.run`` raises.
    """
    # BUG FIX: the original read `if w.isconnected(): w.start()`, i.e. it
    # started the session only when one was *already* connected. Start the
    # session when there is no live connection instead.
    if not w.isconnected():
        w.start()
    try:
        # NOTE(review): debug=True is unsuitable for production; kept as-is
        # to preserve behavior.
        app.run(host="0.0.0.0", debug=True)
    finally:
        w.close()
def run_wsd(in_DataFrame=None):
    """Enrich a bond list with Wind valuation/classification fields.

    For every code in ``in_DataFrame['full_code']`` this queries Wind for
    the latest par value, the Wind level-1 bond type, the trading venue,
    the CNBD dirty-price valuation and the dirty close price, then merges
    the results back onto the input frame.

    :param in_DataFrame: DataFrame with a 'full_code' column. ``None``
        (the new default) is treated as an empty frame; the original
        call-time ``pd.DataFrame()`` default is preserved behaviorally.
    :return: input frame merged with the fetched Wind columns.
    """
    from WindPy import w
    if in_DataFrame is None:
        # Avoid evaluating pd.DataFrame() in the signature (shared default).
        in_DataFrame = pd.DataFrame()
    temp_result = in_DataFrame
    rows = []
    w.start()
    try:
        # Hoisted out of the loop -- assumed deterministic within one run;
        # TODO(review) confirm get_lastday() has no per-call side effects.
        last_day = get_lastday()
        for code in temp_result['full_code']:
            # Bond latest par value.
            latest_par = w.wsd(code, "latestpar", "ED-0D", last_day, "")
            gs_result_code = latest_par.Codes[0]
            code_latest_par = latest_par.Data[0][0]
            # Wind level-1 bond classification.
            windl1_type = w.wss(code, "windl1type")
            code_windl1_type = windl1_type.Data[0][0]
            # Trading venue.
            exch_city = w.wss(code, "exch_city")
            code_exch_city = exch_city.Data[0][0]
            # CNBD dirty-price valuation.
            # NOTE(review): this call uses "ED0D" while latestpar uses
            # "ED-0D" -- kept as in the original; confirm intended.
            dirty_cnbd = w.wsd(code, "dirty_cnbd", "ED0D", last_day, "credibility=1")
            code_dirty_cnbd = dirty_cnbd.Data[0][0]
            # Close price (dirty).
            dirty_price = w.wsd(code, "dirtyprice", "ED0D", last_day, "")
            code_dirty_price = dirty_price.Data[0][0]
            rows.append({'date': last_day,
                         'full_code': gs_result_code,
                         'code_latest_par': code_latest_par,
                         'code_windl1_type': code_windl1_type,
                         'code_exch_city': code_exch_city,
                         'code_dirty_cnbd': code_dirty_cnbd,
                         'code_dirty_price': code_dirty_price})
    finally:
        # BUG FIX: the session is now released even if a Wind call raises.
        w.close()
    # BUG FIX: DataFrame.append was deprecated in pandas 1.4 and removed in
    # 2.0 -- collect the rows and build the frame once instead.
    gs_result_wind = pd.DataFrame(
        rows,
        columns=['date', 'full_code', 'code_latest_par', 'code_windl1_type',
                 'code_exch_city', 'code_dirty_cnbd', 'code_dirty_price'])
    merge_result = pd.merge(temp_result, gs_result_wind)
    return merge_result
def data_from_wind(code, freq, start="", end=""):
    """Fetch raw data from Wind and hand it to ``to_dataframe``.

    Daily frequency ('D') pulls OHLC candles via ``wsd``; any other
    frequency is treated as an EDB macro series with forward filling.
    Returns whatever ``to_dataframe`` produces for the given frequency.
    """
    w.start()
    raw = (
        w.wsd(code, "OPEN,HIGH,LOW,CLOSE", start, end, "")
        if freq == 'D'
        else w.edb(code, start, end, "Fill=Previous")
    )
    w.close()
    return to_dataframe(raw, freq)
def import_wind_future_daily():
    """Incrementally import daily futures bars from Wind into the
    ``wind_future_daily`` table.

    The SQL picks, per contract, the day after the last stored trade date
    (falling back to the IPO date) as the start, and min(yesterday,
    last trade date) as the end, so repeated runs only fetch new rows.
    """
    # Example request shape:
    # w.wsd("AG1612.SHF", "open,high,low,close,volume,amt,dealnum,settle,oi,st_stock", "2016-11-01", "2016-12-21", "")
    sql_str = """select fi.wind_code, ifnull(trade_date_max_1, ipo_date) date_frm, if(subdate(curdate(), 1)<lasttrade_date,subdate(curdate(), 1),lasttrade_date) date_to from wind_future_info fi left outer join (select wind_code, adddate(max(trade_date),1) trade_date_max_1 from wind_future_daily group by wind_code) wfd on fi.wind_code = wfd.wind_code"""
    engine = get_db_engine()
    future_date_dic = {}
    with get_db_session(engine) as session:
        table = session.execute(sql_str)
        for wind_code, date_frm, date_to in table.fetchall():
            future_date_dic[wind_code] = (date_frm, date_to)
    data_df_list = []
    w.start()
    try:
        for wind_code, (date_frm, date_to) in future_date_dic.items():
            # Nothing new to fetch for this contract.
            if date_frm > date_to:
                continue
            print('get %s between %s and %s' % (wind_code, date_frm, date_to))
            data_df_tmp = wsd_cache(w, wind_code, "open,high,low,close,volume,amt,dealnum,settle,oi,st_stock", date_frm, date_to, "")
            data_df_tmp['wind_code'] = wind_code
            data_df_list.append(data_df_tmp)
    finally:
        # Persist whatever was fetched, even if the loop aborted mid-way.
        data_df_count = len(data_df_list)
        if data_df_count > 0:
            print('merge data with %d df' % data_df_count)
            data_df = pd.concat(data_df_list)
            data_df.index.rename('trade_date', inplace=True)
            # Composite key (wind_code, trade_date) for the target table.
            data_df = data_df.reset_index().set_index(['wind_code', 'trade_date'])
            data_df.rename(columns={c: str.lower(c) for c in data_df.columns}, inplace=True)
            # 'oi' (open interest) is stored as 'position' in the DB schema.
            data_df.rename(columns={'oi': 'position'}, inplace=True)
            data_count = data_df.shape[0]
            data_df.to_sql('wind_future_daily', engine, if_exists='append',
                           index_label=['wind_code', 'trade_date'],
                           dtype={
                               'wind_code': String(20),
                               'trade_date': Date,
                               'open': Float,
                               'high': Float,
                               'low': Float,
                               'close': Float,
                               'volume': Float,
                               'amt': Float,
                               'dealnum': Float,
                               'settle': Float,
                               'position': Float,
                               'st_stock': Float,
                           })
            print('%d data import' % data_count)
        else:
            print('no data for merge')
        w.close()
def get_mkt_data_minutes(
        d_df: pd_DataFrame, freq=30
) -> Tuple[pd_DataFrame, pd_DataFrame, pd_DataFrame, pd_DataFrame]:
    """Return (d_open, u_open, d_close, u_close) minute-bar frames.

    Loads from the MKT_DATA_FILE HDF cache when present; otherwise fetches
    ``freq``-minute bars from Wind for the derivative codes (``d_code``)
    and underlying codes (``u_code``) in ``d_df``, caches them, and
    returns frames indexed by bar timestamp with one column per code.
    """
    if op_exists(MKT_DATA_FILE):
        # Cache hit: read all four frames back from HDF.
        d_op_df = pd_read_hdf(MKT_DATA_FILE, key="do")
        u_op_df = pd_read_hdf(MKT_DATA_FILE, key="uo")
        d_cp_df = pd_read_hdf(MKT_DATA_FILE, key="dc")
        u_cp_df = pd_read_hdf(MKT_DATA_FILE, key="uc")
    else:
        def _data2df(data):
            # wsi for multiple codes returns interleaved (code, price) rows;
            # pivot into a timestamp-indexed frame with one column per code.
            tdf = pd_DataFrame(data=data.Data, columns=pd_DatetimeIndex(data.Times), index=["code", "p"]).T
            rst_df = pd_concat([df["p"] for _, df in tdf.groupby("code")], axis=1)
            rst_df.columns = [tkr for tkr, _ in tdf.groupby("code")]
            return rst_df.astype(float)
        w_obj.start()
        data = w_obj.wsi(",".join(d_df["d_code"].to_list()), "open", START_DT, END_DT, f"BarSize={freq}")
        d_op_df = _data2df(data)
        # NOTE(review): the "open" frame for underlyings is fetched with the
        # "close" field -- looks intentional (underlying close as reference
        # price) but confirm against the caller.
        data = w_obj.wsi(",".join(d_df["u_code"].unique().tolist()), "close", START_DT, END_DT, f"BarSize={freq}")
        u_op_df = pd_DataFrame(data=data.Data, columns=pd_DatetimeIndex(data.Times), index=data.Codes).T.astype(float)
        data = w_obj.wsi(",".join(d_df["d_code"].to_list()), "close", START_DT, END_DT, f"BarSize={freq}")
        d_cp_df = _data2df(data)
        data = w_obj.wsi(",".join(d_df["u_code"].unique().tolist()), "close", START_DT, END_DT, f"BarSize={freq}")
        u_cp_df = pd_DataFrame(data=data.Data, columns=pd_DatetimeIndex(data.Times), index=data.Codes).T.astype(float)
        w_obj.close()
        # All four frames must cover the same number of bars.
        assert len(d_op_df) == len(u_op_df) == len(d_cp_df) == len(u_cp_df)
        # Sort by timestamp and cache each frame under its HDF key.
        d_op_df.sort_index(inplace=True)
        d_op_df.to_hdf(MKT_DATA_FILE, key="do")
        u_op_df.sort_index(inplace=True)
        u_op_df.to_hdf(MKT_DATA_FILE, key="uo")
        d_cp_df.sort_index(inplace=True)
        d_cp_df.to_hdf(MKT_DATA_FILE, key="dc")
        u_cp_df.sort_index(inplace=True)
        u_cp_df.to_hdf(MKT_DATA_FILE, key="uc")
    return d_op_df, u_op_df, d_cp_df, u_cp_df
def download_daily_data(date_=TODAY, is_overwrite_=False):
    """Download A-share (trading days only) and HK daily data via Wind.

    :param date_: trade date to download (defaults to TODAY).
    :param is_overwrite_: passed through to the downloaders.
    :return: the per-market result frames concatenated column-wise.
    """
    w.start()
    r = pd.DataFrame()
    try:
        if calendar.is_trading_date(date_):
            temp_r = download_ashare_daily_data(w, date_, is_overwrite_)
            r = pd.concat([r, temp_r], axis=1)
        # NOTE(review): the HK download is unconditional here; an
        # is_trading_date(date_, exchange_='hk') guard was commented out in
        # the original -- confirm whether it should be restored.
        temp_r = download_hk_daily_data(w, date_, is_overwrite_)
        r = pd.concat([r, temp_r], axis=1)
    finally:
        # BUG FIX: the Wind session was leaked when a downloader raised;
        # always release it.
        w.close()
    return r
def get_mkt_data_days(
        d_df: pd_DataFrame
) -> Tuple[pd_DataFrame, pd_DataFrame, pd_DataFrame, pd_DataFrame]:
    """Return (d_open, u_open, d_close, u_close) daily frames.

    Loads from the MKT_DATA_FILE HDF cache when present; otherwise fetches
    daily series from Wind for the derivative codes (``d_code``) and
    underlying codes (``u_code``) in ``d_df``, caches them, and returns
    frames indexed by date with one column per code.
    """
    if op_exists(MKT_DATA_FILE):
        # Cache hit: read all four frames back from HDF.
        d_op_df = pd_read_hdf(MKT_DATA_FILE, key="do")
        u_op_df = pd_read_hdf(MKT_DATA_FILE, key="uo")
        d_cp_df = pd_read_hdf(MKT_DATA_FILE, key="dc")
        u_cp_df = pd_read_hdf(MKT_DATA_FILE, key="uc")
    else:
        def _fetch(codes, field):
            # One wsd call -> codes x dates payload, transposed to a
            # date-indexed frame with one float column per code.
            data = w_obj.wsd(",".join(codes), field, START_DT, END_DT, "")
            return pd_DataFrame(data=data.Data,
                                columns=pd_DatetimeIndex(data.Times),
                                index=data.Codes).T.astype(float)
        w_obj.start()
        try:
            # Refactor: the four copy-pasted fetch stanzas collapsed into
            # one helper; field choices preserved exactly.
            d_op_df = _fetch(d_df["d_code"].to_list(), "open")
            # NOTE(review): the underlying "open" frame is fetched with the
            # "close" field, as in the original -- confirm intended.
            u_op_df = _fetch(d_df["u_code"].unique().tolist(), "close")
            d_cp_df = _fetch(d_df["d_code"].to_list(), "close")
            u_cp_df = _fetch(d_df["u_code"].unique().tolist(), "close")
        finally:
            # BUG FIX: release the Wind session even if a fetch raises.
            w_obj.close()
        # All four frames must cover the same number of dates.
        assert len(d_op_df) == len(u_op_df) == len(d_cp_df) == len(u_cp_df)
        # Sort by date and cache each frame under its HDF key.
        for frame, key in ((d_op_df, "do"), (u_op_df, "uo"),
                           (d_cp_df, "dc"), (u_cp_df, "uc")):
            frame.sort_index(inplace=True)
            frame.to_hdf(MKT_DATA_FILE, key=key)
    return d_op_df, u_op_df, d_cp_df, u_cp_df
def post(self):
    """Handle POST /tdays/.

    json str:{"begin_time": "2017-3-31", "end_time": "2017-3-31", "options": ""}
    :return: list of trading-date strings on success, or
        ({'error_code': ..., 'message': ...}, 404) on Wind error.
    """
    data_dic = request.json
    logger.info('/tdays/ data_dic:%s', data_dic)
    begin_time = data_dic['begin_time']
    end_time = data_dic['end_time']
    options = data_dic['options']
    if not w.isconnected():
        w.start()
    # Wind expects None, not "", for "no options".
    if options == "":
        options = None
    ret_data = w.tdays(begin_time, end_time, options)
    error_code = ret_data.ErrorCode
    if error_code != 0:
        # BUG FIX: use .get() -- the original .setdefault() inserted an
        # empty message into the shared ERROR_CODE_MSG_DIC for every
        # unknown error code as a side effect of the lookup.
        msg = ERROR_CODE_MSG_DIC.get(error_code, "")
        logger.error('tdays("%s", "%s", "%s") ErrorCode=%d %s' % (begin_time, end_time, options, error_code, msg))
        if error_code == 40521010:
            # 40521010: connection timeout -- recycle the Wind session.
            w.close()
            w.start()
            logger.warning('网络连接超时,端口重新启动')
        return {'error_code': error_code, 'message': msg}, 404
    # Convert the datetime/date payload to plain strings for JSON.
    if len(ret_data.Data) > 0 and len(ret_data.Data[0]) > 0:
        ret_dic = [format_2_date_str(d) for d in ret_data.Data[0]]
    else:
        logger.warning('tdays(%s, %s, %s) No value return' % (begin_time, end_time, options))
        ret_dic = []
    return ret_dic
def update_all(self):
    """Run the full data refresh against Wind.

    Backs up the 'src' directory first; if updating the meta/month-map
    data fails the backup is restored and the run aborts. Daily and
    monthly updates follow on success.
    """
    w.start()
    try:
        date = toffsets.datetime.now().date()
        self.make_backup(dirname='src')
        try:
            self.update_meta_data(date)
            self.update_month_map_data(date)
        except Exception as e:
            # Meta update failed: roll back and abort the whole run.
            self.restore_backup(dirname='src')
            print("""During updating meta data an error happend, msg: {}. Please retry with other Wind account if the error's happening is due to exceeding the usage limit.""".format(e))
            return
        self.update_daily_data()
        self.update_monthly_data()
        print("Update All Data Successfully!")
    finally:
        # BUG FIX: w.close() was skipped on the early return and on any
        # exception from the daily/monthly updates; always release it.
        w.close()
def import_wind_future_munite():
    """Incrementally import minute bars from Wind into
    ``wind_future_minute``, flushing to the DB in batches of >20 frames.

    The SQL computes, per contract, the day after the last stored minute
    date (falling back to the day before IPO) as the start and
    min(yesterday, last trade date) as the end.
    """
    sql_str = """select fi.wind_code, ifnull(trade_date_max_1, subdate(ipo_date, 1)) date_frm, if(subdate(curdate(), 1)<lasttrade_date,subdate(curdate(), 1),lasttrade_date) date_to from wind_future_info fi left outer join (select wind_code, adddate(max(trade_date),1) trade_date_max_1 from wind_future_minute group by wind_code) wfd on fi.wind_code = wfd.wind_code order by fi.wind_code desc"""
    engine = get_db_engine()
    # OrderedDict keeps the SQL's descending wind_code order.
    future_date_dic = OrderedDict()
    with get_db_session(engine) as session:
        table = session.execute(sql_str)
        for wind_code, date_frm, date_to in table.fetchall():
            future_date_dic[wind_code] = (date_frm, date_to)
    data_df_list = []
    w.start()
    try:
        for wind_code, date_pair in future_date_dic.items():
            date_frm, date_to = date_pair
            # Wind does not serve minute bars older than ~1000 days.
            if date_to < date.today() - timedelta(1000):
                print('%s 三年以上历史分钟线数据无法获取' % wind_code)
                continue
            if date_frm >= date_to:
                continue
            # Night session opens 20:59, day session closes 15:00.
            date_frm_str = date_frm.strftime('%Y-%m-%d 20:59:00')
            date_to_str = date_to.strftime('%Y-%m-%d 15:00:00')
            print('get %s between %s and %s' % (wind_code, date_frm_str, date_to_str))
            data_df_tmp = wsi_cache(w, wind_code, "open,high,low,close,volume,amt,oi", date_frm_str, date_to_str, "")
            if data_df_tmp is None:
                continue
            # Drop bars with no traded volume.
            data_df_tmp = data_df_tmp[data_df_tmp['volume'].apply(lambda x: x > 0)]
            if data_df_tmp.shape[0] == 0:
                print('%s has no available data' % wind_code)
                continue
            data_df_tmp['wind_code'] = wind_code
            data_df_list.append(data_df_tmp)
            # Flush to the DB in batches to bound memory usage.
            if len(data_df_list) > 20:
                df_to_sql(data_df_list)
                data_df_list = []
    finally:
        # Persist any remaining batch, then release the Wind session.
        df_to_sql(data_df_list)
        w.close()
def read_data(csv, file_path=None):
    """Load iron-ore (I.DCE) daily data from CSV or from the Wind API.

    :param csv: 1 -> read the CSV at ``file_path``; 0 -> fetch
        open/high/low/close/volume/amt from Wind since 2000-11-02
        (``file_path`` is ignored on this path).
    :param file_path: CSV path, required when ``csv == 1``.
    :return: DataFrame; for any other ``csv`` value returns None
        (preserving the original's implicit fall-through).
    :raises Exception: when ``csv == 1`` and ``file_path`` is None.
    """
    if csv == 1:
        if file_path is None:
            raise Exception('Wrong parameter')
        return pd.read_csv(file_path)
    if csv == 0:
        # DEDUPLICATED: the original had two byte-identical csv==0 branches
        # (file_path is irrelevant on the Wind path).
        from WindPy import w
        w.start()
        data = w.wsd("I.DCE", "open,high,low,close,volume,amt",
                     "2000-11-02", str(datetime.date.today()), "")
        df = pd.DataFrame()
        df['update_date'] = data.Times
        # One column per returned field, coerced to float.
        for i in range(len(data.Fields)):
            df[data.Fields[i]] = data.Data[i]
            df[data.Fields[i]] = df[data.Fields[i]].apply(float)
        df['update_date'] = pd.to_datetime(df['update_date'])
        df['update_date'] = df['update_date'].apply(lambda x: x.date())
        w.close()
        return df
def QA_fetch_get_stock_day(name, startDate, endDate):
    """Fetch daily stock data for ``name`` from Wind between two dates.

    :return: ``data.Data`` (list of field columns, forward-filled,
        forward-adjusted prices) on success; None when ``endDate`` is
        invalid or Wind reports an error -- same contract as before.
    """
    from WindPy import w
    w.start()
    # Validate the end date before issuing the request.
    # (== False kept deliberately: is_valid_date's return type is not
    # visible here, and `not x` is a wider test.)
    if is_valid_date(endDate) == False:
        print("wrong date")
        return None
    # CLEANUP: removed the original close()/start()/isconnected() churn
    # that tore the session down and rebuilt it for no benefit.
    data = w.wsd(name, "sec_name,pre_close,open,high,low,close,volume,amt,dealnum,chg,pct_chg,swing,vwap,adjfactor,turn,free_turn,lastradeday_s,last_trade_day,rel_ipo_chg,rel_ipo_pct_chg,trade_status,susp_days,susp_reason,maxupordown,open3,high3,low3,close3", startDate, endDate, "Fill=Previous;PriceAdj=F")
    print(data.ErrorCode)
    if data.ErrorCode == 0:
        # Message text (including its typo) preserved byte-for-byte.
        print("Connent to Wind successfully")
        return data.Data
    return None
def getDataFromWind(self, startDate):
    """Pull historical data from Wind for every code in ``self.wsetData``.

    Returns a dict mapping code -> DataFrame (dates as rows, fields as
    columns, plus CODE/UPDATE columns). Codes whose request fails are
    logged and skipped.
    """
    result = {}
    w.start()
    for code in self.wsetData:
        self.PrintInfo("获取当前指数的历史数据 : %s" % code)
        resp = w.wsd(codes=code, fields=self.dataName, beginTime=startDate)
        if resp.ErrorCode != 0:
            self.PrintInfo("获取当前指数的历史数据异常 : %s" % code)
            continue
        frame = pd.DataFrame(resp.Data, index=resp.Fields, columns=resp.Times).T
        frame['CODE'] = code
        frame['UPDATE'] = resp.Times
        result[code] = frame
    w.close()
    return result
def wrap(data: pd.DataFrame, group_target: str, **kwargs):
    """Apply ``fun`` (with ``**kwargs`` bound) to each ``group_target``
    group of ``data`` inside an open Wind session.

    Returns the concatenated per-group results with a fresh RangeIndex.
    """
    w.start()
    grouped = data.groupby(by=group_target, group_keys=False)
    applied = grouped.apply(partial(fun, **kwargs))
    result = applied.reset_index(drop=True)
    w.close()
    return result
def getFundNetData(self, fundCodeList=[], startDate='2006-01-01', endDate=date.today().strftime('%Y-%m-%d'), SourceFlag='Wind'):
    '''
    Fetch fund adjusted-NAV history from Wind or iFinD, in batches of 10
    codes, writing each batch to an Excel file and returning the
    column-wise concatenation.

    NOTE(review): fundCodeList=[] is a mutable default, and endDate is
    evaluated once at import time -- both are kept to preserve behavior.
    NOTE(review): iFinD credentials are hard-coded below.
    :return: DataFrame (empty DataFrame on any failure)
    '''
    if not fundCodeList:
        self.PrintInfoDemo.PrintLog('获取的目标基金代码列表为空,请检查!')
        return pd.DataFrame()
    # Prefer a locally cached copy when available.
    netValueDf = self.getDataLocal(CodeList=fundCodeList, dataFlag='Fund')
    if not netValueDf.empty:
        return netValueDf
    everyGrop = 10  # batch size per data-source request
    if SourceFlag == 'Wind':
        w.start()
        filed = 'NAV_adj'  # adjusted unit NAV
        group = 0
        dfList = []
        for fundNum in range(0, len(fundCodeList), everyGrop):
            group = group + 1
            self.PrintInfoDemo.PrintLog('获取第%s组' % str(group))
            if fundNum + everyGrop < len(fundCodeList):
                tempCodeList = fundCodeList[fundNum:fundNum + everyGrop]
            else:
                tempCodeList = fundCodeList[fundNum:]
            tempNetValue = w.wsd(codes=tempCodeList, fields=filed, beginTime=startDate, endTime=endDate, options='Fill=Previous')
            if tempNetValue.ErrorCode != 0:
                self.PrintInfoDemo.PrintLog(infostr='wind读取基金净值数据失败,错误代码: ', otherInfo=tempNetValue.ErrorCode)
                return pd.DataFrame()
            # codes x dates payload transposed to dates x codes.
            tempNetValueDf = pd.DataFrame(tempNetValue.Data, index=tempNetValue.Codes, columns=tempNetValue.Times).T
            # Persist each batch to Excel (hard-coded machine-local path).
            writer = pd.ExcelWriter(
                r"C:\\Users\\lenovo\\PycharmProjects\\FundPoolSelect\\GetDataSource\\FundNetValueDF\\" + "复权单位净值_Group%s.xlsx" % (str(group)))
            tempNetValueDf.to_excel(writer)
            writer.save()
            dfList.append(tempNetValueDf)
        w.close()
        netValueDf = pd.concat(dfList, axis=1, join='outer')
        return netValueDf
    else:
        # iFinD path. NOTE(review): credentials hard-coded.
        thsLogin = THS_iFinDLogin("zszq5072", "754628")
        if thsLogin not in [0, -201]:  # 0 = ok, -201 = already logged in
            self.PrintInfoDemo.PrintLog('登录ifind失败,请检查!')
            return pd.DataFrame()
        group = 0
        dfNetList = []
        for fundNum in range(0, len(fundCodeList), everyGrop):
            group = group + 1
            self.PrintInfoDemo.PrintLog('获取第%s组' % str(group))
            if fundNum + everyGrop < len(fundCodeList):
                tempCodeList = fundCodeList[fundNum:fundNum + everyGrop]
            else:
                tempCodeList = fundCodeList[fundNum:]
            codeListStr = ','.join(tempCodeList)
            indicators = 'adjustedNAV'
            params = 'Interval:D,CPS:1,baseDate:1900-01-01,Currency:YSHB,fill:Previous'
            data = THS_HistoryQuotes(codeListStr, indicators, params, startDate, endDate)
            if data['errorcode'] != 0:
                self.PrintInfoDemo.PrintLog(infostr='ifind获取指数数据失败,错误代码: ', otherInfo=data['errorcode'])
                return pd.DataFrame()
            tData = THS_Trans2DataFrame(data)
            # Pivot the long-format result: one column per fund code.
            dfListIn = []
            for code, tempdf in tData.groupby(by=['thscode']):
                tempdf.set_index('time', drop=True, inplace=True)
                tempFianlDf = tempdf.rename(columns={indicators: code}).drop(labels=['thscode'], axis=1)
                dfListIn.append(tempFianlDf)
            tempNetValueDf = pd.concat(dfListIn, axis=1, join='outer')
            writer = pd.ExcelWriter(
                r"C:\\Users\\lenovo\\PycharmProjects\\FundPoolSelect\\GetDataSource\\HistoryData\\FundNetValueDF\\" + "复权单位净值_Group%s.xlsx" % (
                    str(group)))
            tempNetValueDf.to_excel(writer)
            writer.save()
            dfNetList.append(tempNetValueDf)
        thsLogout = THS_iFinDLogout()
        netValueDf = pd.concat(dfNetList, axis=1, join='outer')
        return netValueDf
def getIndexData(self, indexCodeList=[], startDate='2006-01-01', endDate=date.today().strftime('%Y-%m-%d'), SourceFlag='Wind', method='NotBench'):
    '''
    Fetch index close-price history from Wind or iFinD, write it to a
    local Excel file and return it.

    NOTE(review): indexCodeList=[] is a mutable default, and endDate is
    evaluated once at import time -- both kept to preserve behavior.
    :param indexCodeList: index codes to fetch
    :param startDate: start date
    :param endDate: end date
    :param SourceFlag: data source, 'Wind' or anything else for iFinD
    :param method: forwarded to the local-cache lookup
    :return: DataFrame (empty DataFrame on any failure)
    '''
    if not indexCodeList:
        self.PrintInfoDemo.PrintLog('获取的目标指数代码列表为空,请检查!')
        return pd.DataFrame()
    # Prefer a locally cached copy when available.
    indexDf = self.getDataLocal(CodeList=indexCodeList, dataFlag='Index', method=method)
    if not indexDf.empty:
        return indexDf
    if SourceFlag == 'Wind':
        w.start()
        filed = 'close'
        tempIndexValue = w.wsd(codes=indexCodeList, fields=filed, beginTime=startDate, endTime=endDate, options='')
        if tempIndexValue.ErrorCode != 0:
            self.PrintInfoDemo.PrintLog(infostr='wind读取指数数据失败,错误代码: ', otherInfo=tempIndexValue.ErrorCode)
            return pd.DataFrame()
        # codes x dates payload transposed to dates x codes.
        IndexValueDf = pd.DataFrame(tempIndexValue.Data, index=tempIndexValue.Codes, columns=tempIndexValue.Times).T
        # Persist to Excel (hard-coded machine-local path).
        writer = pd.ExcelWriter(
            r"C:\\Users\\lenovo\\PycharmProjects\\FundPoolSelect\\GetDataSource\\HistoryData\\" + "IndexValueDf.xlsx")
        IndexValueDf.to_excel(writer)
        writer.save()
        self.PrintInfoDemo.PrintLog(infostr='wind读取指数数据成功,存入本地文件 ')
        w.close()
        return IndexValueDf
    else:
        # iFinD path. NOTE(review): credentials hard-coded.
        thsLogin = THS_iFinDLogin("zszq5072", "754628")
        if thsLogin not in [0, -201]:  # 0 = ok, -201 = already logged in
            self.PrintInfoDemo.PrintLog('登录ifind失败,请检查!')
            return pd.DataFrame()
        codeListStr = ','.join(indexCodeList)
        indicators = 'ths_close_price_index'
        initParams = ''
        params = 'Days:Tradedays,Fill:Previous,Interval:D'
        data = THS_DateSerial(codeListStr, indicators, initParams, params, startDate, endDate)
        if data['errorcode'] != 0:
            self.PrintInfoDemo.PrintLog(infostr='ifind获取指数数据失败,错误代码: ', otherInfo=data['errorcode'])
            return pd.DataFrame()
        tData = THS_Trans2DataFrame(data)
        # Pivot the long-format result: one column per index code.
        dfListIn = []
        for code, tempdf in tData.groupby(by=['thscode']):
            tempdf.set_index('time', drop=True, inplace=True)
            tempFianlDf = tempdf.rename(columns={indicators: code}).drop(labels=['thscode'], axis=1)
            dfListIn.append(tempFianlDf)
        IndexValueDf = pd.concat(dfListIn, axis=1, join='outer')
        writer = pd.ExcelWriter(
            r"C:\\Users\\lenovo\\PycharmProjects\\FundPoolSelect\\GetDataSource\\HistoryData\\" + "IndexValueDf.xlsx")
        IndexValueDf.to_excel(writer)
        writer.save()
        self.PrintInfoDemo.PrintLog(infostr='ifind读取指数数据成功,存入本地文件 ')
        thsLogout = THS_iFinDLogout()
        return IndexValueDf
def import_wind_future_info():
    """Discover futures contracts per Wind sector and import the basic
    info of contracts not yet present into ``wind_future_info``.

    Walks each sector's constituent list from its establishment date to
    yesterday in 60-day steps, unions the contract codes, then fetches
    contract metadata via wss for the codes missing from the DB.
    """
    # Contracts already imported (wind_code -> ipo_date).
    sql_str = 'select wind_code, ipo_date from wind_future_info'
    engine = get_db_engine()
    with get_db_session(engine) as session:
        table = session.execute(sql_str)
        wind_code_ipo_date_dic = dict(table.fetchall())
    # Fetch the contract universe from Wind.
    w.start()
    # NOTE(review): several DCE products below carry a .SHF regex suffix,
    # and 铁矿石 reuses 沪银's sectorid -- likely data errors; left
    # untouched here, confirm against Wind sector IDs.
    future_sectorid_dic_list = [
        {'subject_name': 'CFE 沪深300', 'regex': r"IF\d{4}\.CFE", 'sectorid': 'a599010102000000', 'date_establish': '2010-4-16'},
        {'subject_name': 'CFE 上证50', 'regex': r"IH\d{4}\.CFE", 'sectorid': '1000014871000000', 'date_establish': '2015-4-16'},
        {'subject_name': 'CFE 中证500', 'regex': r"IC\d{4}\.CFE", 'sectorid': '1000014872000000', 'date_establish': '2015-4-16'},
        {'subject_name': 'SHFE 黄金', 'regex': r"AU\d{4}\.SHF", 'sectorid': 'a599010205000000', 'date_establish': '2008-01-09'},
        {'subject_name': 'SHFE 沪银', 'regex': r"AG\d{4}\.SHF", 'sectorid': '1000006502000000', 'date_establish': '2012-05-10'},
        {'subject_name': 'SHFE 螺纹钢', 'regex': r"RB\d{4}\.SHF", 'sectorid': 'a599010206000000', 'date_establish': '2009-03-27'},
        {'subject_name': 'SHFE 热卷', 'regex': r"HC\d{4}\.SHF", 'sectorid': '1000011455000000', 'date_establish': '2014-03-21'},
        {'subject_name': 'DCE 焦炭', 'regex': r"J\d{4}\.SHF", 'sectorid': '1000002976000000', 'date_establish': '2011-04-15'},
        {'subject_name': 'DCE 焦煤', 'regex': r"JM\d{4}\.SHF", 'sectorid': '1000009338000000', 'date_establish': '2013-03-22'},
        {'subject_name': '铁矿石', 'regex': r"I\d{4}\.SHF", 'sectorid': '1000006502000000', 'date_establish': '2013-10-18'},
        {'subject_name': '铅', 'regex': r"PB\d{4}\.SHF", 'sectorid': '1000002892000000', 'date_establish': '2011-3-24'},
    ]
    wind_code_set = set()
    ndays_per_update = 60  # step size when walking sector history
    # Collect historical contract codes per sector.
    for future_sectorid_dic in future_sectorid_dic_list:
        subject_name = future_sectorid_dic['subject_name']  # NOTE: unused below
        sector_id = future_sectorid_dic['sectorid']
        regex_str = future_sectorid_dic['regex']
        date_establish = datetime.strptime(future_sectorid_dic['date_establish'], STR_FORMAT_DATE).date()
        # Resume from the latest IPO date already imported for this sector.
        date_since = get_date_since(wind_code_ipo_date_dic, regex_str, date_establish)
        date_yestoday = date.today() - timedelta(days=1)
        while date_since <= date_yestoday:
            date_since_str = date_since.strftime(STR_FORMAT_DATE)
            # e.g. w.wset("sectorconstituent","date=2017-05-02;sectorid=a599010205000000")
            future_info_df = wset_cache(w, "sectorconstituent", "date=%s;sectorid=%s" % (date_since_str, sector_id))
            wind_code_set |= set(future_info_df['wind_code'])
            if date_since >= date_yestoday:
                break
            else:
                date_since += timedelta(days=ndays_per_update)
                # Clamp the final step to yesterday.
                if date_since > date_yestoday:
                    date_since = date_yestoday
    # Keep only contracts not yet in the DB.
    wind_code_list = [wc for wc in wind_code_set if wc not in wind_code_ipo_date_dic]
    # Fetch contract metadata, e.g.:
    # w.wss("AU1706.SHF,AG1612.SHF,AU0806.SHF", "ipo_date,sec_name,...,trade_code")
    future_info_df = wss_cache(w, wind_code_list, "ipo_date,sec_name,sec_englishname,exch_eng,lasttrade_date,lastdelivery_date,dlmonth,lprice,sccode,margin,punit,changelt,mfprice,contractmultiplier,ftmargins,trade_code,thours")
    # Normalize the mixed-format minimum-fluctuation-price field.
    future_info_df['MFPRICE'] = future_info_df['MFPRICE'].apply(mfprice_2_num)
    future_info_count = future_info_df.shape[0]
    future_info_df.rename(columns={c: str.lower(c) for c in future_info_df.columns}, inplace=True)
    future_info_df.index.rename('wind_code', inplace=True)
    future_info_df.to_sql('wind_future_info', engine, if_exists='append',
                          dtype={
                              'wind_code': String(20),
                              'trade_code': String(20),
                              'sec_name': String(50),
                              'sec_englishname': String(50),
                              'exch_eng': String(50),
                              'ipo_date': Date,
                              'lasttrade_date': Date,
                              'lastdelivery_date': Date,
                              'dlmonth': String(20),
                              'lprice': Float,
                              'sccode': String(20),
                              'margin': Float,
                              'punit': String(20),
                              'changelt': Float,
                              'mfprice': Float,
                              'contractmultiplier': Float,
                              'ftmargins': String(100),
                              'thours': String(200),
                          })
    print('%d data import' % future_info_count)
    w.close()
# NOTE(review): Python 2 script fragment (print statements, `print x,`).
# The `try:` below has no visible except/finally -- the handler lies
# outside this snippet, so the fragment is syntactically incomplete as
# shown; reproduced verbatim with review notes only.
endTime = time.strftime(ISOTIMEFORMAT, time.localtime())
try:
    # NOTE(review): SQL is built by string concatenation from trade_code --
    # injection-prone if trade_code is ever untrusted.
    conn = MySQLdb.connect(host='127.0.0.1', user='******', passwd='root', db='MyChartTest', port=3306)
    cur = conn.cursor()
    conn.set_character_set('utf8')
    # Latest stored bar time + 1 minute = incremental fetch start.
    cur.execute("select date_add(max(time), interval 1 minute) from bwsi where trade_code = '" + trade_code + "'")
    beginTime = cur.fetchone()[0]
    # Fetch 1-minute bars from the Wind API.
    w.start();
    data = w.wsi(trade_code, "open,high,low,close,volume,amt", beginTime, endTime, "BarSize=1")
    w.close();
    if data.ErrorCode == 0:
        t = 0
        j = len(data.Fields)
        print data.Codes
        print data.Fields
        # NOTE(review): field 'amount' is looked up although the request
        # asked for 'amt' -- confirm which name wsi actually returns.
        volume_idx = data.Fields.index('volume')
        amt_idx = data.Fields.index('amount')
        values = []
        # NOTE(review): the loop variable shadows the `time` module used
        # above -- harmless only because strftime is not called again.
        for time in data.Times:
            value = [u'龙净环保', data.Codes[0], time]
            print time,
            i = 0
def download_daily_data(date_=TODAY, is_overwrite_=False):
    """
    Download daily date: 1.index 2.stock 3.future 4.option

    Writes one CSV per asset class under the DAILY_QUOTE root; each file
    is only (re)written when absent or when ``is_overwrite_`` is set.
    Also fetches HK stock / fund / forex quotes and moves + decompresses
    the day's KLine archives.
    """
    w.start()
    _root = CONFIG.get('DAILY_QUOTE', NotImplementedError)
    # ---- index daily quote (local Wind DB + HSHKI via Wind API) ----
    _quote_file_index = os.path.join(_root, f'index_{date_}.csv')
    if (not fsutils.is_file(_quote_file_index)) or (
            fsutils.is_file(_quote_file_index) and is_overwrite_):
        BENCHMARK = ['H00016.SH', 'H00300.CSI', 'H00905.CSI', 'H00852.SH']
        sql = f'''
        SELECT S_INFO_WINDCODE as symbol,S_DQ_PRECLOSE as pre_close,S_DQ_CLOSE as [close],
        S_DQ_CHANGE as change_price,S_DQ_PCTCHANGE as change_rate
        FROM [dbo].[AINDEXEODPRICES]
        WHERE S_INFO_WINDCODE in ({"'"+"','".join(BENCHMARK)+"'"}) and TRADE_DT='{date_}'
        '''
        df = winddb.read(sql)
        # get other market index
        BENCHMARK = ['HSHKI.HI']
        ret = set_data(
            w.wsd("HSHKI.HI", "windcode,pre_close,close,chg,pct_chg", date_, date_, ""))
        tmp_df = pd.DataFrame.from_dict(ret)
        tmp_df.rename(columns={
            'windcode': 'symbol',
            'chg': 'change_price',
            'pct_chg': 'change_rate'
        }, inplace=True)
        df = pd.concat([df, tmp_df])
        df.to_csv(_quote_file_index, encoding='gbk', index=0)
    # ---- A share daily quote ----
    _quote_file_stock = os.path.join(_root, f'stock_{date_}.csv')
    if (not fsutils.is_file(_quote_file_stock)) or (
            fsutils.is_file(_quote_file_stock) and is_overwrite_):
        sql = f'''
        SELECT S_INFO_WINDCODE as symbol,S_DQ_PRECLOSE as pre_close,S_DQ_CLOSE as [close],S_DQ_CHANGE as change_price,
        S_DQ_PCTCHANGE as change_rate,S_DQ_VOLUME as volume,S_DQ_AMOUNT as amount,S_DQ_AVGPRICE as avg_price,S_DQ_TRADESTATUS as trade_status
        FROM [dbo].[ASHAREEODPRICES] where TRADE_DT='{date_}'
        '''
        df = winddb.read(sql)
        df.to_csv(_quote_file_stock, encoding='gbk', index=0)
    # ---- index future daily quote ----
    _quote_file_future = os.path.join(_root, f'future_{date_}.csv')
    if (not fsutils.is_file(_quote_file_future)) or (
            fsutils.is_file(_quote_file_future) and is_overwrite_):
        sql = f'''
        SELECT S_INFO_WINDCODE as symbol, S_DQ_PRESETTLE as pre_settle, S_DQ_CLOSE as [close], S_DQ_SETTLE as settle,
        S_DQ_VOLUME as volume, S_DQ_AMOUNT as amount,S_DQ_OI as oi
        FROM [dbo].[CINDEXFUTURESEODPRICES] where TRADE_DT='{date_}'
        '''
        df = winddb.read(sql)
        df.to_csv(_quote_file_future, encoding='gbk', index=0)
    # ---- option daily quote (SSE 50ETF options via wset) ----
    _quote_file_option = os.path.join(_root, f'option_{date_}.csv')
    if (not fsutils.is_file(_quote_file_option)) or (
            fsutils.is_file(_quote_file_option) and is_overwrite_):
        target_lists = ['510050.SH']
        df = pd.DataFrame()
        for t in target_lists:
            ret = set_data(
                w.wset(
                    "optiondailyquotationstastics",
                    f"startdate={date_};enddate={date_};exchange=sse;windcode={t}"
                ))
            t_df = pd.DataFrame.from_dict(ret)
            # Option codes get the underlying's exchange suffix appended.
            t_df['symbol'] = t_df['option_code'] + t[-3:]
            t_df.rename(columns={
                'option_name': 'name',
                'settlement_price': 'settle',
                'change': 'change_rate'
            }, inplace=True)
            t_df['change_price'] = t_df['settle'] - t_df['pre_settle']
            # NOTE(review): DataFrame.append is deprecated (removed in
            # pandas 2.0) -- migrate to pd.concat when touching this.
            df = df.append(t_df, ignore_index=True)
        df.to_csv(_quote_file_option, encoding='gbk', index=0)
    # ---- HK stock daily quote (symbols from yesterday's positions and
    # today's trades) ----
    _quote_file_hks = os.path.join(_root, f'hkstock_{date_}.csv')
    if (not fsutils.is_file(_quote_file_hks)) or (
            fsutils.is_file(_quote_file_hks) and is_overwrite_):
        y_date_ = calendar.get_trading_date(date_=date_, offset_=-1)
        sql = f'''
        SELECT distinct WindCode FROM [dbo].[JasperPosition] where trade_dt = '{y_date_}' and type='HKS'
        union
        SELECT distinct WindCode FROM [dbo].[JasperTradeDetail] where trade_dt = '{date_}' and type='HKS'
        '''
        l_symbols = tradedb.read(sql)['WindCode'].values.tolist()
        ret = set_data(
            w.wss(
                f"{','.join(l_symbols)}",
                "windcode,pre_close,open,close,volume,amt,chg,pct_chg,trade_status",
                f"tradeDate={date_};priceAdj=U;cycle=D"))
        df = pd.DataFrame.from_dict(ret)
        df.rename(columns={
            'windcode': 'symbol',
            'amt': 'amount',
            'chg': 'change_price',
            'pct_chg': 'change_rate'
        }, inplace=True)
        df.to_csv(_quote_file_hks, encoding='gbk', index=0)
    # ---- decompress kline data (move the day's archives, then unpack) ----
    for i in [1, 2]:
        file_name_ = f'{date_}-KLine-{i}.7z'
        fsutils.move_file(
            file_name_,
            from_=r'\\192.168.1.136\data\Wind\tdb\tdb-data-gx\2019',
            to_=r'D:\temp',
            replace_=True)
        _kline_files = os.path.join(r'D:\temp', file_name_)
        if (not fsutils.is_file(_kline_files)) or (
                fsutils.is_file(_kline_files) and is_overwrite_):
            decompress_7Z(
                _kline_files,
                save_dir_=r'\\192.168.1.88\"Trading Share"\daily_quote\KLine')
    # ---- fund daily quote (same symbol sourcing as HK stocks) ----
    _quote_file_fund = os.path.join(_root, f'fund_{date_}.csv')
    if (not fsutils.is_file(_quote_file_fund)) or (
            fsutils.is_file(_quote_file_fund) and is_overwrite_):
        y_date_ = calendar.get_trading_date(date_=date_, offset_=-1)
        sql = f'''
        SELECT distinct WindCode FROM [dbo].[JasperPosition] where trade_dt = '{y_date_}' and type='F'
        union
        SELECT distinct WindCode FROM [dbo].[JasperTradeDetail] where trade_dt = '{date_}' and type='F'
        '''
        l_symbols = tradedb.read(sql)['WindCode'].values.tolist()
        ret = set_data(
            w.wss(
                f"{','.join(l_symbols)}",
                "windcode,pre_close,open,close,volume,amt,chg,pct_chg,trade_status",
                f"tradeDate={date_};priceAdj=U;cycle=D"))
        # NOTE(review): here symbols are overwritten from l_symbols rather
        # than the returned 'windcode' -- assumes same order; confirm.
        ret['symbol'] = l_symbols
        df = pd.DataFrame.from_dict(ret)
        df.rename(columns={
            'amt': 'amount',
            'chg': 'change_price',
            'pct_chg': 'change_rate'
        }, inplace=True)
        df.to_csv(_quote_file_fund, encoding='gbk', index=0)
    # ---- forex ----
    _quote_file_forex = os.path.join(_root, f'forex_{date_}.csv')
    if (not fsutils.is_file(_quote_file_forex)) or (
            fsutils.is_file(_quote_file_forex) and is_overwrite_):
        BENCHMARK = ['HKDCNY.EX']
        ret = set_data(w.wsd(BENCHMARK, "windcode,close", date_, date_, ""))
        df = pd.DataFrame.from_dict(ret)
        df.rename(columns={'windcode': 'symbol'}, inplace=True)
        df.to_csv(_quote_file_forex, index=0)
    w.close()
def close_windapi():
    """Shut down the module-level Wind API session."""
    w.close()
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context-manager exit: close the Wind session unconditionally.

    Exceptions are not suppressed (implicitly returns None).
    """
    w.close()
# Script fragment: assign a TWAP execution window per stock, then write
# the merged results back into the trade-record workbook.
# Relies on names defined earlier in the script (dataset, df, dfli, conn,
# client, starttimeforbuy, filepath, helper functions).
for i in dataset.index:
    if dataset.at[i, 'MV'] <= 50000:
        # Small positions trade within a single minute.
        timeneeded = 1
    else:
        averageamount = getamountperminute(i)
        # NOTE(review): nesting of this try under the else branch is the
        # most sensible reading of the flattened source (averageamount is
        # only defined here) -- confirm against the original file.
        try:
            # Minutes needed, rounded up to a multiple of 5.
            timeneeded = getmultipleof5(math.ceil(dataset.at[i, 'MV'] / averageamount) * 5)
        except ValueError as e:
            print('代码为' + i + '的股票不能交易')
        else:
            twaptime = timeperiod(starttimeforbuy, timeneeded)
            df.loc[(df['证券代码'] == i) & (df['方向'] == 'buy'), 'time'] = twaptime
            print('客户为' + client + '代码为' + i + '方向为' + df.loc[df['证券代码'] == i, '方向'] + '的Twap时间为' + twaptime)
# NOTE(review): both replace() results are discarded (no inplace/assign),
# so these two lines have no effect as written.
df['TwapPrice'].replace(0, np.nan)
df['期末价格'].replace(0, np.nan)
dfli.append(df)
conn.close()
result = pd.concat(dfli)
w.close()
# Write column 9 of the merged result into column 10 of the '成交纪录'
# sheet, one row per record (data rows start at Excel row 2).
wb = load_workbook(filepath)
ws = wb['成交纪录']
for i in range(0, result.shape[0]):
    ws.cell(row=i+2, column=10).value = result.iloc[i, 9]
wb.save(filepath)
wb.close()
def import_smfund_info():
    """Import structured ("分级") fund metadata from Wind into the
    ``wind_smfund_info`` table.

    Samples the structured-fund sector constituents at several historical
    dates, diffs against codes already in the DB, fetches setup/maturity
    dates plus leveraged-fund details for the new codes, and appends them
    to the table.

    :raises Exception: when wss returns no data for the new codes.
    """
    w.start()
    try:
        # CLEANUP: removed the unused `types` sector-id dict and the unused
        # `today` string from the original.
        # Sample the sector constituents at a few historical dates to cover
        # funds that existed at any point, then union the codes.
        dates = ['2011-01-01', '2013-01-01', '2015-01-01', '2017-01-01']
        # BUG FIX: DataFrame.append is deprecated/removed -- concat once.
        df = pd.concat([
            wset_cache(w, "sectorconstituent",
                       "date=%s;sectorid=1000006545000000" % date_p)
            for date_p in dates
        ])
        wind_code_all = df['wind_code'].unique()
        # Drop codes already imported, and pseudo-codes containing '!'.
        with get_db_session() as session:
            table = session.execute("select wind_code from wind_smfund_info")
            wind_code_existed = set(content[0] for content in table.fetchall())
        wind_code_new = [code for code in set(wind_code_all) - wind_code_existed
                         if code.find('!') < 0]
        if len(wind_code_new) == 0:
            print('no sm fund fow import')
        else:
            info_df = wss_cache(w, wind_code_new, 'fund_setupdate, fund_maturitydate')
            if info_df is None:
                raise Exception('no data')
            info_df['FUND_SETUPDATE'] = info_df['FUND_SETUPDATE'].apply(lambda x: x.date())
            info_df['FUND_MATURITYDATE'] = info_df['FUND_MATURITYDATE'].apply(lambda x: x.date())
            info_df.rename(columns={'FUND_SETUPDATE': 'fund_setupdate',
                                    'FUND_MATURITYDATE': 'fund_maturitydate'}, inplace=True)
            field = "fund_type,wind_code,sec_name,class_a_code,class_a_name,class_b_code,class_b_name,a_pct,b_pct,upcv_nav,downcv_nav,track_indexcode,track_indexname,max_purchasefee,max_redemptionfee"
            # One leveragedfundinfo request per fund, as of its setup date.
            detail_df_list = []
            for code in info_df.index:
                beginDate = info_df.loc[code, 'fund_setupdate'].strftime('%Y-%m-%d')
                temp_df = wset_cache(w, "leveragedfundinfo",
                                     "date=%s;windcode=%s;field=%s" % (beginDate, code, field))
                detail_df_list.append(temp_df)
            df = pd.concat(detail_df_list)  # replaces deprecated append loop
            df.set_index('wind_code', inplace=True)
            # Codes containing 'S' mark the exchange-tradable share class.
            df['tradable'] = df.index.map(lambda x: x if 'S' in x else None)
            info_df = info_df.join(df, how='outer')
            # BUG FIX: the original discarded the rename() result, so these
            # renames never took effect; a no-op when the columns are absent.
            info_df = info_df.rename(columns={'a_nav': 'nav_a',
                                              'b_nav': 'nav_b',
                                              'a_fs_inc': 'fs_inc_a',
                                              'b_fs_inc': 'fs_inc_b'})
            info_df.index.rename('wind_code', inplace=True)
            engine = get_db_engine()
            info_df.to_sql('wind_smfund_info', engine, if_exists='append',
                           index_label='wind_code',
                           dtype={
                               'wind_code': String(20),
                               'fund_setupdate': Date,
                               'fund_maturitydate': Date,
                               'fund_type': String(20),
                               'sec_name': String(50),
                               'class_a_code': String(20),
                               'class_a_name': String(50),
                               'class_b_code': String(20),
                               'class_b_name': String(50),
                               'track_indexcode': String(20),
                               'track_indexname': String(50),
                               'tradable': String(20),
                           })
    finally:
        # BUG FIX: release the Wind session even when an early error raises.
        w.close()