def clcindex(data):
    # Compute the traffic risk index and write it out
    indexpath = Writefile.readxml(glovar.trafficpath, 6)[-1]
    trafficindex = [np.max(data[i], axis=0) for i in range(56)]
    fname = ['%03d' % i for i in range(3, 169, 3)]
    filetime = Znwg.znwgtime()
    Writefile.write_to_csv(indexpath, trafficindex, 'TrafficIndex', fname, filetime)
def __init__(self, cpath):
    self.m1path, self.m2path, self.savepath, self.roadpath, self.indexpath = Writefile.readxml(cpath, 1)
    self.dics = Writefile.readxml(cpath, 2)[0].split(',')
    # Road latitude/longitude coordinates
    self.new_lat = glovar.roadlat
    self.new_lon = glovar.roadlon
def clcindex(data, path):
    indexpath = Writefile.readxml(path, 0)
    trafficindex = [np.max(data[i], axis=0) for i in range(56)]
    fname = ['%03d' % i for i in range(3, 169, 3)]
    filetime = Znwg.znwgtime()
    Writefile.write_to_csv(indexpath, trafficindex, 'TrafficIndex', fname, filetime)
def write(path, data, name, lat=None, lon=None, type=0):
    filetime = ecmwf.ecreptime()
    fh = range(3, 169, 3)
    fnames = ['_%03d' % i for i in fh]
    if type == 0:
        Writefile.write_to_nc(path, data, lat, lon, name, fnames, filetime)
    else:
        Writefile.write_to_csv(path, data, name, fnames, filetime)
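# A minimal usage sketch for write() above, assuming the module-level glovar
# grid; `outdir` and the zero-filled grid are hypothetical stand-ins, not
# names from this codebase.
def _demo_write(outdir=r'/tmp/demo/'):
    grid = np.zeros((56, len(glovar.lat), len(glovar.lon)))     # 56 forecast steps
    write(outdir, grid, 'Demo', glovar.lat, glovar.lon, type=0)  # NetCDF branch
    write(outdir, grid.reshape(56, -1), 'DemoIndex', type=1)     # CSV branch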
def __init__(self, new_lat, new_lon):
    """
    Initialise parameters.
    :param new_lat: target latitude grid
    :param new_lon: target longitude grid
    (self.dics holds the elements needed to compute snow depth)
    """
    cpath = r'/home/cqkj/QHTraffic/Product/Source/snowconfig.xml'
    self.m1path, self.m2path, self.savepath, self.roadpath, self.indexpath = Writefile.readxml(cpath, 1)
    self.dics = Writefile.readxml(cpath, 2)[0].split(',')
    self.new_lat = new_lat
    self.new_lon = new_lon
def write(self, data, lat=None, lon=None, type=0):
    filetime = ecmwf.ecreptime()
    fh = range(3, 169, 3)
    fnames = ['_%03d' % i for i in fh]
    name = 'Snow'
    if type == 0:
        Writefile.write_to_nc(self.savepath, data, lat, lon, name, fnames, filetime)
    else:
        Writefile.write_to_csv(self.indexpath, data, 'SnowIndex', fnames, filetime)
def reverse(saltedata, dataset, snowdepth):
    """
    Load the trained models and generate the snow-depth results.
    :param saltedata: satellite data
    :param dataset: EC meteorological element data
    :param snowdepth: snow depth of the previous time step
    :return: [56, 801, 1381] gridded snow-depth data
    """
    tmp = [data.reshape(-1, 1) for data in dataset]  # flatten the basic elements
    ele = np.concatenate(tmp, axis=1)
    ele.resize(56, 801 * 1381, 4)  # reshape; the previous snow depth is appended below
    temp = np.nan_to_num(ele)
    snowdepth = snowdepth.reshape(-1, 1)  # snow depth of the previous time step only
    m1, m2, savepath, roadpath, indexpath, _ = Writefile.readxml(glovar.trafficpath, 0)[0].split(',')
    m2 = r'/home/cqkj/LZD/Product/Product/Source/snow.pickle'  # overrides the configured model path
    if saltedata is not None:
        with open(m1, 'rb') as f:
            model1 = pickle.load(f)
        saltedata.resize(801 * 1381, 1)
        typecode = 1
    else:
        with open(m2, 'rb') as f:
            model2 = pickle.load(f)
        typecode = 2
    alldata = []
    for i in range(56):
        if typecode == 1:
            # basic elements + previous snow depth + satellite data
            newdataset = np.concatenate([temp[i], snowdepth, saltedata], axis=1)
            prediction = np.array(model1.predict(newdataset))  # result of this step
        elif typecode == 2:
            # basic elements + previous snow depth only
            # NOTE: these forecasts can look blocky when visualised
            newdataset = np.concatenate([temp[i], snowdepth], axis=1)
            prediction = np.array(model2.predict(newdataset))  # result of this step
        predictions = np.nan_to_num(prediction)
        snowdepth = predictions[:, np.newaxis]  # feed this result into the next prediction
        predictions.resize(len(glovar.lat), len(glovar.lon))
        sdgrid = np.nan_to_num(predictions)
        sdgrid[sdgrid < 0] = 0
        alldata.append(sdgrid)
    sp = r'/data/traffic/snow//'
    Writefile.write_to_nc(sp, np.array(alldata), glovar.lat, glovar.lon, 'snowdepth',
                          glovar.fnames, glovar.filetime)
    return np.array(alldata)  # [56, 801, 1381] gridded data
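# A stripped-down sketch of the autoregressive loop inside reverse(): each
# step's prediction is fed back as the "previous snow depth" feature of the
# next step. `model`, `features` and the shapes are illustrative assumptions.
def _autoregressive_demo(model, features, first_depth):
    # features: [steps, points, n_base]; first_depth: [points, 1]
    depth = first_depth
    results = []
    for step_features in features:
        x = np.concatenate([step_features, depth], axis=1)
        pred = np.nan_to_num(np.asarray(model.predict(x)))
        pred[pred < 0] = 0           # snow depth cannot be negative
        depth = pred[:, np.newaxis]  # feed back into the next step
        results.append(pred)
    return np.array(results)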
def readIndex():
    """
    Read the index files from the configured paths; one list of DataFrames
    is returned per path.
    :return: lists of DataFrames --> [DataFrame, DataFrame, ...]
    """
    allindexpath = Writefile.readxml(glovar.trafficpath, 6)
    allfname = [regex(index) for index in allindexpath]  # one file-name list per path
    windvalue, icevalue, floodvalue = [], [], []
    for i in range(len(allfname[0])):
        windvalue.append(pd.read_csv(os.path.join(allindexpath[0], allfname[0][i])))
        icevalue.append(pd.read_csv(os.path.join(allindexpath[1], allfname[1][i])))
        floodvalue.append(pd.read_csv(os.path.join(allindexpath[2], allfname[2][i])))
    return windvalue, icevalue, floodvalue
def snowData():
    # Fetch the EC element data (temperature, precipitation, ground
    # temperature, humidity, snow depth)
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run gives today's 08:00 EC forecast
    # *_, dics = Writefile.readxml(glovar.trafficpath, 0)
    *_, dics = Writefile.readxml(r'/home/cqkj/LZD/Product/Product/config/Traffic.xml', 0)
    dicslist = dics.split(',')[:-1]
    lonlatset, dataset = [], []
    for dic in dicslist:
        newdata = []
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all():
                # Fill an all-NaN slice by splitting the next time step in two
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(interp.interpolateGridData(data[i], lat, lon, glovar.lat, glovar.lon))
        newdata = np.array(newdata)
        # newdata[newdata < 0] = 0  # clip to keep the data physical
        dataset.append(newdata)  # store the interpolated data
    return np.array(dataset)
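# The gap-filling rule in snowData() isolated for clarity: an all-NaN time
# slice is replaced by half of the following slice, which is halved in turn
# so the two steps still sum to the original accumulation. A minimal sketch.
def _fill_missing_steps(data):
    for i in range(data.shape[0] - 1):
        if np.isnan(data[i]).all():
            data[i] = data[i + 1] / 2
            data[i + 1] = data[i + 1] / 2
    return data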
def main():
    saltedata = saltedata(path)  # NOTE: relies on a module-level saltedata() and `path`
    snowpre = np.random.randint(0, 1, size=(801 * 1381, 1))
    snow = SnowDepth()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    region = [float(i) for i in ','.join(
        Writefile.readxml(r'/home/cqkj/QHTraffic/Product/Traffic/SNOD/config.xml', 0)).split(',')]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Extract the data and coordinates (nested loop; a candidate for optimisation)
    for dic in snow.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all():
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(interp.interpolateGridData(data[i], lat, lon, new_lat, new_lon))
        dataset.append(np.array(newdata))  # store the interpolated data
    depthgrid = snow.clcsd(dataset, new_lat, new_lon, saltedata, snowpre)
    snow.write(depthgrid, new_lat, new_lon)
    dangerindex = snow.clcindex(depthgrid, new_lat, new_lon)
    snow.write(dangerindex, type=1)
def filelist(path):
    # Collect today's files from the four gridded-forecast directories
    now = dt.datetime.now().strftime('%Y%m%d')
    pattern = r'(' + now + ')'
    rpath, spath, wpath, icpath, savepath = Writefile.readxml(path, 2)
    pathlists = [rpath, spath, wpath, icpath]
    elements = [Znwg.regex(pattern, os.listdir(p)) for p in pathlists]
    return elements
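# A quick illustrative version of the date filter used by filelist(), done
# with the standard-library re module rather than Znwg.regex (whose exact
# return format is assumed to be a list of matching names).
def _match_today(names, today):
    # Keep only the file names carrying today's date stamp
    return [n for n in names if re.search(today, n)]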
def main():
    snowpath, gpath, fpath, rainpath, savepath = Writefile.readxml(glovar.forestpath, 0)
    snow = snowdepth(snowpath)                 # snow-depth NC data, [10, 801, 1381]
    data, *_ = Weatherdata(glovar.forestpath)  # forest fire-risk weather data
    ldtype = landtype(gpath, fpath)
    gindex, findex, mindex = firelevel(data, rainpath, snow, ldtype)  # the final indices
    filetime = ecmwf.ecreptime()
    fh = range(10)
    fnames = ['_%03d' % i for i in fh]
    Writefile.write_to_nc(savepath, gindex, filetime=filetime, fnames=fnames,
                          lat=glovar.lat, lon=glovar.lon, name='green')
    Writefile.write_to_nc(savepath, findex, filetime=filetime, fnames=fnames,
                          lat=glovar.lat, lon=glovar.lon, name='forest')
    Writefile.write_to_nc(savepath, mindex, filetime=filetime, fnames=fnames,
                          lat=glovar.lat, lon=glovar.lon, name='meteo')
def main():
    # Compute the hazard impact areas; `datasets` holds the graded forecast
    # of each meteorological element read from the NC files
    varname = ['Rain', 'Snow_depth', 'Wind', 'Roadic']
    datasets = [xr.open_mfdataset(path, concat_dim='time')[name].values
                for path, name in zip(filelist(), varname)]
    rain, snow, wind, roadic = datasets
    # Weight the elements into a single combined hazard code
    roadic *= 8
    snow *= 4
    wind *= 2
    rain *= 1
    disaster = roadic + snow + wind + rain
    configpath = r'../config/disaster.xml'
    savepath = Writefile.readxml(configpath, 1)[-1]
    filetime = ecmwf.ecreptime()
    fh = range(3, 169, 3)
    fnames = ['_%03d' % i for i in fh]
    Writefile.write_to_nc(savepath, disaster, glovar.lat, glovar.lon, 'Disaster', fnames, filetime)
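# Because the hazards are weighted with powers of two (roadic*8 + snow*4 +
# wind*2 + rain*1), the combined code can be unpacked like a bit mask,
# assuming each input grid is a 0/1 grading; multi-level grades would need a
# different decoding. A minimal sketch:
def _decode_disaster(code):
    code = code.astype(int)
    # boolean grids for (roadic, snow, wind, rain)
    return (code & 8) > 0, (code & 4) > 0, (code & 2) > 0, (code & 1) > 0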
def Weatherdata(path):
    # Fetch the meteorological data needed for the forest fire-risk index
    elements, subdirs, localdir, _, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]
    grib = Datainterface.GribData()
    # Sync the wind, relative-humidity and temperature data (24003) ...
    for element, remote_url in zip(elements[:-1], remote_urls[:-1]):
        grib.mirror(element, remote_url, localdir, freq, ftp)
    grib.mirror(elements[-1], remote_urls[-1], localdir, '24024', ftp)  # ... then the precipitation
    # TODO: extract per-element file lists; for now four patterns are hard-coded
    strings = ','.join(os.listdir(localdir))
    patterns = [r'(\w+.EDA.*?.GRB2)', r'(\w+.ERH.*?.GRB2)',
                r'(\w+.TMP.*?.GRB2)', r'(\w+.ER24.*?.GRB2)']
    allpath = [localdir + sorted(Znwg.regex(pattern, strings), key=str.lower)[-1]
               for pattern in patterns]  # the newest synced file of each element
    ele14list = slice(1, 74, 8)  # 14:00 record of each of the next ten days for the first three elements
    # The first element holds interleaved u-wind and v-wind records
    wind = grib.readGrib(allpath[0])[0]
    windu_v = np.array([v for _, v in wind.items()])
    windu, windv = windu_v[::2][ele14list], windu_v[1::2][ele14list]
    data = np.array([Znwg.arrange(grib.readGrib(path))[0][ele14list]
                     for path in allpath[1:-1]])  # humidity and temperature
    # Precipitation is national-level data; check its grid matches the other elements
    er, lat, lon, size = Znwg.arrange([grib.readGrib(allpath[-1], nlat=glovar.latt, nlon=glovar.lonn)][0])
    result = windu, windv, data, er  # final shape should be [4, 10, 181, 277]
    return result, lat, lon
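# Sketch of the u/v de-interleave used in Weatherdata(): GRIB wind records
# alternate u and v, so even indices are u-wind and odd indices are v-wind.
# The stand-in array below replaces real GRIB records.
def _deinterleave_demo():
    records = np.arange(20).reshape(20, 1)  # ten interleaved u/v pairs
    u, v = records[::2], records[1::2]      # u: records 0,2,4...; v: 1,3,5...
    return u, v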
def liverain(path, pklpath):
    # Sync the latest observed 24-h precipitation from ZNWG and append it
    # to the pickled queue of recent observations
    elements, _, localdir, historydir, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    ytd = now - datetime.timedelta(days=1)
    dir = r'/SCMOC/BEXN'
    remote_url = os.path.join(dir, ytd.strftime('%Y'), ytd.strftime('%Y%m%d'))
    grb = Datainterface.GribData()
    grb.mirror('ER24', remote_url, localdir, '24024', ftp)  # sync the precipitation files
    rainpath = sorted(os.listdir(localdir))[-1]
    os.chdir(localdir)
    rainlive, lat, lon, res = Znwg.arrange([grb.readGrib(rainpath, nlat=glovar.latt, nlon=glovar.lonn)][0])
    with open(pklpath, 'rb') as f:
        data = pickle.load(f)
    data.append(rainlive)  # append to the deque of recent observations
    with open(pklpath, 'wb') as f:
        pickle.dump(data, f)
    return rainlive
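# liverain() assumes pklpath already holds an appendable container. A minimal
# bootstrap sketch using a bounded deque, so only the most recent N days of
# observations are kept (maxlen=10 is an assumption, not from the source).
def _init_rain_pickle(pklpath, maxlen=10):
    from collections import deque
    with open(pklpath, 'wb') as f:
        pickle.dump(deque(maxlen=maxlen), f)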
def main():
    ice = Roadic()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    dataset = icele()
    icgrid = ice.icegrid(dataset, glovar.lat, glovar.lon)
    savepath, indexpath, cmpath, _ = Writefile.readxml(glovar.trafficpath, 4)[2:]
    write(savepath, icgrid, 'Roadic', glovar.lat, glovar.lon)  # save the ice-thickness grid first
    iceroad = ice.depth2onezero(icgrid, glovar.lat, glovar.lon)
    # Read the CIMISS dataset; fetching and saving the observations is
    # handled by a separate programme
    cmissdata = np.loadtxt(cmpath, delimiter=',')
    icedays = RoadIceindex(cmissdata, iceroad)
    roadicing = icedays.iceday()
    write(indexpath, roadicing, 'RoadicIndex', type=1)
def __init__(self):
    self._path = glovar.trafficpath
    self.mpath, self.roadpath, *_ = Writefile.readxml(self._path, 4)