def mirrorGrib(path):
    # Tested 6.14. Mirrors the 08-20 / 20-08 observed gridded fields: temperature (Kelvin),
    # accumulated precipitation and relative humidity. At 20:00 it syncs today's 08:00 data,
    # at 08:00 it syncs yesterday's 20:00 data.
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(path, 0)  # freq should be None here
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # three local directories
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # build the three remote paths
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        # mirror each element into its own directory; still to be verified, the files must
        # fall within 08-20, or 20:00 of the previous day to 08:00 of the current day
        grid.mirror(element, remote_url, localdir, ftp)
    # List each directory; TEM, RAIN and RH are assumed to be the 08-20 file-name lists
    RAINs, RHs, TEMs = [sorted(os.listdir(localdir)) for localdir in localdirs]  # from 00:00 up to now
    e2tTems = [tem for tem in TEMs if int(tem[-7:-5]) in range(8, 21)]
    e2tRains = [rain for rain in RAINs if int(rain[-7:-5]) in range(8, 21)]
    e2tRhs = [rh for rh in RHs if int(rh[-7:-5]) in range(8, 21)]
    # The arrays are assumed to share one resolution, i.e. shape [12, lat * lon]
    tem = [Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM)))
           for TEM in e2tTems]  # each item holds (data, lat, lon, size) on the national grid and must be interpolated to Qinghai
    lat, lon = tem[0][1], tem[0][2]
    temdata = np.array([np.nan_to_num(interp.interpolateGridData(t[0] - 273.15, lat, lon,
                                                                 glovar.lat, glovar.lon))
                        for t in tem])
    raindata = np.array([np.nan_to_num(interp.interpolateGridData(
        Znwg.arrange(grid.readGrib(os.path.join(localdirs[0], RAIN)))[0],
        lat, lon, glovar.lat, glovar.lon)) for RAIN in e2tRains])
    rhdata = np.array([np.nan_to_num(interp.interpolateGridData(
        Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
        lat, lon, glovar.lat, glovar.lon)) for RH in e2tRhs])
    return temdata, raindata, rhdata
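# The block below is a minimal, self-contained sketch of two building blocks used in
# mirrorGrib above: the hour-window filter on file names (characters [-7:-5]) and the
# Kelvin-to-Celsius conversion with NaN handling. File names and array shapes are
# synthetic stand-ins, not the real mirrored data.
import numpy as np

def _hours_08_20(names):
    # keep files whose hour field, read from characters [-7:-5], lies in 08..20
    return [n for n in names if int(n[-7:-5]) in range(8, 21)]

if __name__ == '__main__':
    names = ['TEM_20200615%02d.GRB2' % h for h in range(24)]
    picked = _hours_08_20(names)                 # 13 files, hours 08..20
    kelvin = np.full((len(picked), 3, 4), 280.0)
    kelvin[0, 0, 0] = np.nan
    celsius = np.nan_to_num(kelvin - 273.15)     # same conversion as temdata above
    print(len(picked), celsius.shape)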
def upSnowele(gbpath, sktpath):
    # Process the snow-history element files: gbpath holds the gridded-element pkl files,
    # sktpath holds the CIMISS-interpolated pkl files.
    grid = Datainterface.GribData()
    gbNlist, sktList = sorted(os.listdir(gbpath))[-5:], sorted(os.listdir(sktpath))[-5:]  # could be tightened with a regex
    # Take the data from 11:00-17:00 and 23:00-05:00 each day; consider keeping only the latest files
    eleData = [Znwg.arrange(grid.readGrib(os.path.join(gbpath, gblist))) for gblist in gbNlist]
    sktData = [Znwg.arrange(grid.readGrib(os.path.join(sktpath, sktlist))) for sktlist in sktList]  # join against sktpath, not gbpath
    return np.array(eleData), np.array(sktData)
def clcindex(data, path):
    # Compute the traffic index field for each of the 56 forecast steps and write it to CSV.
    indexpath = Writefile.readxml(path, 6)
    trafficindex = [np.max(data[i], axis=0) for i in range(56)]  # per-step maximum over the first axis
    fname = ['%03d' % i for i in range(3, 169, 3)]  # forecast-hour suffixes 003..168, 3 h apart
    filetime = Znwg.znwgtime()
    Writefile.write_to_csv(indexpath, trafficindex, 'trafficindex', fname, filetime)
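# A tiny, self-contained sketch (synthetic shapes only) of the reduction clcindex performs:
# a per-step maximum over the first axis plus the 003..168 file-name suffixes.
import numpy as np

if __name__ == '__main__':
    steps = np.random.rand(56, 4, 10, 10)              # 56 steps, 4 fields, 10x10 grid
    traffic_index = [np.max(steps[i], axis=0) for i in range(56)]
    suffixes = ['%03d' % i for i in range(3, 169, 3)]  # '003', '006', ..., '168'
    assert len(traffic_index) == len(suffixes) == 56
    print(traffic_index[0].shape, suffixes[0], suffixes[-1])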
def readIndex(path):
    """
    Read the index files found under each configured directory; one list of DataFrames
    is returned per directory.
    :return: lists of DataFrames --> [DataFrame, DataFrame, ...]
    """
    allindexpath = Znwg.regex(path, 6)
    # An earlier variant parsed the config with ElementTree (ET.parse(path)) and took the
    # directories from root[-1]; the config only fixes the directories, the concrete
    # file names are resolved with a regex.
    allfname = [regex(index) for index in allindexpath]  # one file-name list per directory
    windpath, icepath, floodpath = [], [], []
    windvalue, icevalue, floodvalue = [], [], []
    for i in range(len(allfname[0])):
        windpath.append(os.path.join(allindexpath[0], allfname[0][i]))
        windvalue.append(pd.read_csv(os.path.join(allindexpath[0], allfname[0][i])))
        icepath.append(os.path.join(allindexpath[1], allfname[1][i]))
        icevalue.append(pd.read_csv(os.path.join(allindexpath[1], allfname[1][i])))
        floodpath.append(os.path.join(allindexpath[2], allfname[2][i]))
        floodvalue.append(pd.read_csv(os.path.join(allindexpath[2], allfname[2][i])))
    return windvalue, icevalue, floodvalue
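# A hedged, pandas-only alternative to the index-based loop in readIndex: the paired
# directories and file-name lists are zipped instead of indexed. Directory and file names
# are whatever the caller passes in; nothing here comes from the real config.
import os
import pandas as pd

def read_parallel_csv(dirs, fname_lists):
    """dirs: one directory per index type; fname_lists: one file-name list per directory."""
    tables = []
    for directory, names in zip(dirs, fname_lists):
        tables.append([pd.read_csv(os.path.join(directory, name)) for name in names])
    return tables  # [[DataFrame, ...], [DataFrame, ...], ...]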
def predepth():
    # Water depth of the previous time step; needs testing and tuning on the server.
    dr = np.zeros(shape=(801, 1381))  # for now the previous-step depth defaults to zero
    now = datetime.datetime.now()
    znwgtm = Znwg.znwgtime()
    *_, ftp = Writefile.readxml(glovar.trafficpath, 1)
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\ANALYSIS\\CMPA', now.strftime('%Y'), now.strftime('%Y%m%d'))
    localdir = r'/home/cqkj/QHTraffic/Product/Product/mirror/rainlive'
    grib.mirror('FRT_CHN_0P05_3HOR', remote_url, localdir, ftp)
    rname = sorted(os.listdir(localdir))[-1]
    rpath = os.path.join(localdir, rname)
    data, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))
    data = interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
    dataset = data[np.newaxis]  # add a leading axis to match the expected input shape
    res = FloodModel.cal2(dataset, dr)
    return res[0]
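# A small sketch of the shape handling around the flood model call in predepth: the
# previous-step depth starts as zeros and the interpolated rain field gets a leading
# axis before being handed to the model. Shapes are small stand-ins for (801, 1381).
import numpy as np

if __name__ == '__main__':
    prev_depth = np.zeros((4, 6))       # previous-step water depth, default 0
    rain = np.random.rand(4, 6)         # stands in for the interpolated 3 h precipitation
    dataset = rain[np.newaxis]          # shape (1, 4, 6): one leading time step
    print(dataset.shape, prev_depth.shape)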
def mirrorskgrib(path):
    # To be tested June 15. Hourly variant of mirrorGrib: syncs the hourly observation data
    # (all three grids), which lag by about 15 minutes; the same config file can be reused.
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(path, 0)  # freq should be None here
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # three local directories
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # build the three remote paths
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        grid.mirror(element, remote_url, localdir, ftp)  # mirror each element into its own directory
    # Take the newest file in each directory; RAIN, RH and TEM follow the directory order
    RAIN, RH, TEM = [sorted(os.listdir(localdir))[-1] for localdir in localdirs]
    tem = Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM)))
    lat, lon = tem[1], tem[2]
    temdata = np.array(np.nan_to_num(interp.interpolateGridData(tem[0] - 273.15, lat, lon,
                                                                glovar.lat, glovar.lon)))
    raindata = np.array(np.nan_to_num(interp.interpolateGridData(
        Znwg.arrange(grid.readGrib(os.path.join(localdirs[0], RAIN)))[0],
        lat, lon, glovar.lat, glovar.lon)))
    rhdata = np.array(np.nan_to_num(interp.interpolateGridData(
        Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
        lat, lon, glovar.lat, glovar.lon)))
    Time = datetime.datetime.now().strftime('%Y%m%d%H')
    savepath = ''.join([r'/home/cqkj/QHTraffic/tmp/ele', Time, r'.pkl'])  # hourly precipitation, humidity and temperature
    with open(savepath, 'wb') as f:  # file names are distinguished by time, down to the hour
        pickle.dump([temdata, raindata, rhdata], f)
    return temdata, raindata, rhdata
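# Sketch of the hourly pickle round trip used at the end of mirrorskgrib. The target
# path and the arrays are placeholders; only the save/load pattern is illustrated.
import datetime
import pickle
import numpy as np

if __name__ == '__main__':
    stamp = datetime.datetime.now().strftime('%Y%m%d%H')
    savepath = ''.join(['/tmp/ele', stamp, '.pkl'])      # one file per hour
    with open(savepath, 'wb') as f:
        pickle.dump([np.zeros((2, 2))] * 3, f)           # [temdata, raindata, rhdata]
    with open(savepath, 'rb') as f:
        temdata, raindata, rhdata = pickle.load(f)
    print(savepath, temdata.shape)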
def Weatherdata(path):
    # Mirror and parse the ZNWG forecast elements: wind (EDA), relative humidity (ERH),
    # temperature (TMP) and 24 h precipitation (ER24).
    elements, subdirs, localdir, _, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # build the remote paths
    grib = Datainterface.GribData()
    for element, remote_url in zip(elements[:-1], remote_urls[:-1]):
        grib.mirror(element, remote_url, localdir, ftp, freq=freq)
    grib.mirror(elements[-1], remote_urls[-1], localdir, ftp, freq='24024')  # 24 h precipitation uses its own freq tag
    # Pick the newest file of each element from the local directory with a regex pattern
    strings = ','.join(os.listdir(localdir))
    patterns = [r'(\w+.EDA.*?.GRB2)', r'(\w+.ERH.*?.GRB2)',
                r'(\w+.TMP.*?.GRB2)', r'(\w+.ER24.*?.GRB2)']
    allpath = [os.path.join(localdir, sorted(Znwg.regex(pattern, strings), key=str.lower)[-1])
               for pattern in patterns]  # allpath holds the latest file of each element
    ele14list = slice(1, 74, 8)  # indices 1, 9, ..., 73: ten records, one 14:00 step per day
    # The wind GRIB stores the u and v components as alternating records
    wind = grib.readGrib(allpath[0])[0]
    windu_v = np.array([v for _, v in wind.items()])
    windu, windv = windu_v[::2][ele14list], windu_v[1::2][ele14list]
    data = np.array([Znwg.arrange(grib.readGrib(path))[0][ele14list]
                     for path in allpath[1:-1]])  # humidity and temperature, same time selection
    # er, lat, lon, size = Znwg.arrange(grib.readGrib(allpath[-1], nlat=glovar.lat, nlon=glovar.lon))
    er, lat, lon, size = Znwg.arrange([grib.readGrib(allpath[-1], nlat=glovar.latt, nlon=glovar.lonn)][0])
    result = windu, windv, *data, er  # the gridded part stacks to roughly [4, 10, 181, 277]
    return result, lat, lon
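# Sketch of the u/v de-interleaving and the slice(1, 74, 8) selection used in Weatherdata.
# The records array is synthetic; in the real GRIB the u and v wind fields alternate
# along the record axis.
import numpy as np

if __name__ == '__main__':
    records = np.arange(148).reshape(148, 1, 1)     # stand-in for alternating u/v records
    u_all, v_all = records[::2], records[1::2]      # 74 u fields, 74 v fields
    pick = slice(1, 74, 8)                          # indices 1, 9, ..., 73 -> 10 steps
    windu, windv = u_all[pick], v_all[pick]
    print(windu.shape, windv.shape)                 # (10, 1, 1) (10, 1, 1)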
def snowdepth(path):
    # Tested 6.18. Build daily mean snow depth from the 3-hourly NC files and map it to a 0/1 flag.
    now = Znwg.znwgtime().strftime('%Y%m%d%H')
    pattern = r'(' + now + '.*?.nc)'
    strings = os.listdir(mkdirdate.dataMdir(path))
    namelist = sorted(Znwg.regex(pattern, strings))
    os.chdir(mkdirdate.dataMdir(path))
    datasets = xr.open_mfdataset(namelist, concat_dim='time')
    data = datasets.SnowDepth.values
    newdata = []
    for i in range(0, 7):
        tmp = np.mean(data[i * 8:(i + 1) * 8], axis=0)  # daily mean of eight 3-hourly fields
        newdata.append(tmp)
    newdata.extend(newdata[-3:])  # repeat the last three days to pad the series to ten
    newdata = np.array(newdata)
    newdata = np.piecewise(newdata, [newdata < 0.1, newdata >= 0.1], [1, 0])  # 1 below 0.1, 0 otherwise
    return newdata
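# Sketch of the daily averaging and the 0.1 threshold in snowdepth: eight 3-hourly fields
# are averaged per day, the last three days are repeated to pad the series to ten, and
# values are mapped to 1 (below 0.1) or 0 (at or above 0.1). Data are synthetic.
import numpy as np

if __name__ == '__main__':
    data = np.random.rand(56, 5, 5) * 0.2                # 7 days x 8 fields, 5x5 grid
    daily = data.reshape(7, 8, 5, 5).mean(axis=1)        # same result as the explicit loop
    padded = np.concatenate([daily, daily[-3:]])         # ten "days"
    flags = np.piecewise(padded, [padded < 0.1, padded >= 0.1], [1, 0])
    print(daily.shape, padded.shape, int(flags.min()), int(flags.max()))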
def rainData():
    # Mirror the ZNWG precipitation grid file and parse it.
    now = datetime.datetime.now()
    *_, elements, ftp = Writefile.readxml(glovar.trafficpath, 1)
    # *_, elements, ftp = Writefile.readxml(r'/home/cqkj/LZD/Product/Product/config/Traffic.xml', 5)
    element = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', now.strftime('%Y'), now.strftime('%Y%m%d'))
    grib.mirror(element[0], remote_url, element[1], ftp, element[2])
    rname = sorted(os.listdir(element[1]))[-1]
    rpath = os.path.join(element[1], rname)
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))  # arrange() yields data, lat, lon, size
    return [interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
            for data in dataset[:56]]
def windData(path):
    # Fetch and parse the wind grid data.
    *_, elements, ftp = Writefile.readxml(path, 2)
    element = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', glovar.now.strftime('%Y'), glovar.now.strftime('%Y%m%d'))
    grib.mirror(element[0], remote_url, element[1], ftp, element[2])
    rname = sorted(os.listdir(element[1]))[-1]
    rpath = os.path.join(element[1], rname)
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))  # arrange() yields data, lat, lon, size
    return [interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
            for data in dataset]  # return the interpolated grids as a list
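# Minimal sketch of the "newest mirrored file" pattern shared by rainData and windData:
# sort the directory listing and join the last entry onto the directory with
# os.path.join, which avoids a missing path separator. The directory is a placeholder.
import os

def newest_file(directory):
    names = sorted(os.listdir(directory))
    if not names:
        raise FileNotFoundError('no mirrored files in %s' % directory)
    return os.path.join(directory, names[-1])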
def mirror(self, element, remote_url, localdir, *args, freq=None):
    """
    Synchronise data from the remote server.
    :param element: name of the element to fetch, e.g. ER03, TMAX
    :param remote_url: path on the remote server, used to build the mirror target
    :param localdir: local directory that receives the mirrored GRIB files
    :param args: server address, user name and password
    :param freq: time-resolution tag of the files, e.g. 24003, 24024
    :return: None
    """
    # freq is keyword-only so the credential list can always be passed positionally,
    # matching the calls elsewhere in this package.
    ftp_url, user, password = args[0]
    initnal_time = Znwg.znwgtime()
    if freq:
        cmd = '''lftp -c "open {ftp} -u {user},{password}; lcd {localdir}; cd {remote_url}; mirror --no-recursion -I *{element}_{init_time:%Y%m%d%H%M}_{freq}.GRB2"'''.format(
            ftp=ftp_url, user=user, password=password, localdir=localdir,
            remote_url=remote_url, element=element, init_time=initnal_time, freq=freq)
    else:
        cmd = '''lftp -c "open {ftp} -u {user},{password}; lcd {localdir}; cd {remote_url}; mirror --no-recursion -I *{element}-{init_time:%Y%m%d%H}.GRB2"'''.format(
            ftp=ftp_url, user=user, password=password, localdir=localdir,
            remote_url=remote_url, element=element, init_time=initnal_time)
    print(cmd)
    try:
        os.system(cmd)
    except Exception:
        traceback.print_exc()
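# A hedged sketch of how the lftp mirror command could be assembled and executed with
# subprocess instead of os.system; host, credentials and the include pattern are
# placeholders supplied by the caller, not values read from the real config.
import subprocess

def run_lftp_mirror(ftp, user, password, localdir, remote_url, pattern):
    cmd = ('lftp -c "open {ftp} -u {user},{password}; lcd {localdir}; '
           'cd {remote_url}; mirror --no-recursion -I {pattern}"').format(
               ftp=ftp, user=user, password=password,
               localdir=localdir, remote_url=remote_url, pattern=pattern)
    # shell=True because the whole lftp script is passed as a single quoted string
    return subprocess.call(cmd, shell=True)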
def liverain(path, pklpath):
    # Mirror the national CMPA observed daily precipitation, crop it to the ZNWG extent
    # and append it to the pickled history.
    elements, _, localdir, historydir, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    ytd = now - datetime.timedelta(days=1)
    basedir = r'//ANALYSIS//CMPA//0P05'
    remote_url = os.path.join(basedir, now.strftime('%Y'), now.strftime('%Y%m%d'))
    grb = Datainterface.GribData()
    grb.mirror('FAST_CHN_0P05_DAY-PRE', remote_url, localdir, ftp, freq=None)
    rainpaths = sorted(os.listdir(localdir))[-1]  # newest mirrored file
    os.chdir(localdir)
    rainlive, lat, lon, res = Znwg.arrange([grb.readGrib(rainpaths, nlat=glovar.latt, nlon=glovar.lonn)][0])
    # Append the new field to the pickled history (a deque would bound its length)
    with open(pklpath, 'rb') as f:
        data = pickle.load(f)
    data.append(rainlive)
    with open(pklpath, 'wb') as f:
        pickle.dump(data, f)  # dump the whole history, not only the latest field
    return rainlive
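# The comment in liverain suggests keeping the history as a deque; this is a hedged
# sketch of that idea with a bounded length (maxlen is an arbitrary placeholder, as is
# the pklpath passed in by the caller).
import pickle
from collections import deque

def append_bounded(pklpath, new_field, maxlen=24):
    try:
        with open(pklpath, 'rb') as f:
            history = deque(pickle.load(f), maxlen=maxlen)
    except FileNotFoundError:
        history = deque(maxlen=maxlen)
    history.append(new_field)
    with open(pklpath, 'wb') as f:
        pickle.dump(list(history), f)
    return history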
# This module holds the variables shared across the Product package.
import numpy as np
import datetime as dt
from Dataprocess import Znwg
import pandas as pd

# 1-D coordinate arrays used when writing the NC files
lat = np.arange(31, 40.001, 0.01)
lon = np.arange(89, 103.001, 0.01)
# 2-D mesh grids used for grid-point interpolation
longrid, latgrid = np.meshgrid(lon, lat)
# Lat/lon range used to crop the national precipitation data, matching the ZNWG file extent
latt = np.linspace(31.4, 39.4, 161)
lonn = np.linspace(89.25, 102.95, 274)
# Information needed when writing the NC files
now = dt.datetime.now()
filetime = Znwg.znwgtime().strftime('%Y%m%d%H%M')
fh = range(3, 169, 3)
fnames = ['_%03d' % i for i in fh]
# Paths of the config files and the road source files
windpath = r'/home/cqkj/QHTraffic/Product/Product/Source/Road_wind.csv'
roadpath = r'/home/cqkj/QHTraffic/Product/Product/Source/QHroad_update.csv'
trafficpath = r'/home/cqkj/QHTraffic/Product/Product/config/Traffic.xml'
forestpath = r'/home/cqkj/QHTraffic/Product/Product/config/forest.xml'
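# Quick self-check of the shared grids defined above: the 1-D axes have 901 x 1401
# points at 0.01 degree spacing and meshgrid produces matching 2-D fields. Purely
# illustrative; it only re-derives what this module already defines.
import numpy as np

if __name__ == '__main__':
    _lat = np.arange(31, 40.001, 0.01)
    _lon = np.arange(89, 103.001, 0.01)
    _longrid, _latgrid = np.meshgrid(_lon, _lat)
    print(_lat.size, _lon.size)      # 901 1401
    print(_latgrid.shape)            # (901, 1401)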