def main():
    ice = Roadic()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    region = [float(i) for i in ','.join(Writefile.readxml(glovar.trafficpath, 0)).split(',')]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Pull each element and its lat/lon grid (nested loop; candidate for refactoring)
    for dic in ice.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        # If a forecast frame is all-NaN, split the next frame across both steps
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all():
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
        # Interpolate every frame onto the target grid (the original call
        # discarded the interpolated result; collect it instead)
        data = np.array([interp.interpolateGridData(d, lat, lon, new_lat, new_lon) for d in data])
        dataset.append(data)  # keep the interpolated dataset
    icgrid = ice.icegrid(dataset, new_lat, new_lon)
    savepath, indexpath = Writefile.readxml(glovar.trafficpath, 1)[2:]
    write(savepath, icgrid, 'Roadic', new_lat, new_lon)  # save the thickness grid first
    iceroad = ice.depth2onezero(icgrid, new_lat, new_lon)
    # Read the CIMISS dataset; fetching and saving the live data is handled by another program
    cmissdata = np.loadtxt('/home/cqkj/QHTraffic/qhroadic/cmsk.csv', delimiter=',')
    icedays = RoadIceindex(cmissdata, iceroad)
    roadicing = icedays.iceday()
    write(indexpath, roadicing, 'RoadicIndex', type=1)

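# The all-NaN frame patch above recurs in main(), iceData() and the snow-depth
# pipeline. A minimal shared helper, assuming the (time, lat, lon) layout used
# here and numpy imported as np (fill_nan_frames is a hypothetical name, not
# part of the original code):

def fill_nan_frames(data):
    """Replace an all-NaN frame by half of the following frame, in place."""
    for i in range(data.shape[0] - 1):
        if np.isnan(data[i]).all():
            data[i] = data[i + 1] / 2
            data[i + 1] = data[i + 1] / 2
    return data
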
def __init__(self, new_lat, new_lon):
    """
    Initialise parameters.
    :param new_lat: target latitude grid
    :param new_lon: target longitude grid
    """
    cpath = r'/home/cqkj/QHTraffic/Product/Source/snowconfig.xml'
    self.m1path, self.m2path, self.savepath, self.roadpath, self.indexpath = Writefile.readxml(cpath, 1)
    self.dics = Writefile.readxml(cpath, 2)[0].split(',')  # elements needed to compute snow depth

def revise(message):
    # Correct the parsed result and interpolate to 1 km resolution
    mdpath, gcpath, savepath, *_ = Writefile.readxml(glovar.trafficpath, 1)
    net = torch.load(mdpath)
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.array([np.nan_to_num([data, dem]) for data in message[:, :801, :1381]])
    inputs = torch.from_numpy(arrays)
    with torch.no_grad():  # inference only; no gradients needed
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    outputs = np.nan_to_num(outputs)
    outputs[outputs < 0] = 0
    output = np.squeeze(outputs)
    # Source grid of the corrected field, interpolated onto the target grid
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    raingb = np.array([
        np.nan_to_num(interp.interpolateGridData(op, lat, lon, glovar.lat, glovar.lon))
        for op in output
    ])
    Writefile.write_to_nc(savepath, raingb, glovar.lat, glovar.lon, 'Rain',
                          glovar.fnames, glovar.filetime)
    return outputs

def main():
    salte = saltedata(path)  # satellite data; renamed so the function is not shadowed (`path` must be supplied by the module)
    snowpre = np.random.randint(0, 1, size=(801 * 1381, 1))  # previous snow depth, all zeros
    snow = SnowDepth()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    region = [float(i) for i in ','.join(
        Writefile.readxml(r'/home/cqkj/QHTraffic/Product/Traffic/SNOD/config.xml', 0)).split(',')]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Pull each element and its lat/lon grid (nested loop; candidate for refactoring)
    for dic in snow.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        # If a forecast frame is all-NaN, split the next frame across both steps
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all():
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
        # Interpolate every frame onto the target grid (result was previously discarded)
        data = np.array([interp.interpolateGridData(d, lat, lon, new_lat, new_lon) for d in data])
        dataset.append(data)  # keep the interpolated dataset
    depthgrid = snow.clcsd(dataset, new_lat, new_lon, salte, snowpre)
    snow.write(depthgrid, new_lat, new_lon)
    dangerindex = snow.clcindex(depthgrid, new_lat, new_lon)
    snow.write(dangerindex, type=1)

def iceData():
    # Fetch EC forecast fields (air temperature, precipitation, ground
    # temperature, humidity, snow depth)
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run fetches today's 08:00 EC forecast
    *_, dics = Writefile.readxml(glovar.trafficpath, 4)
    dicslist = dics.split(',')
    lonlatset, dataset = [], []
    for dic in dicslist:
        newdata = []
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        # fh holds 57 leads; the loop interpolates the first 56 frames,
        # splitting an all-NaN frame across itself and its successor
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all():
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(interp.interpolateGridData(data[i], lat, lon, glovar.lat, glovar.lon))
        newdata = np.array(newdata)
        # newdata[newdata < 0] = 0  # optionally clamp to keep values physical
        dataset.append(newdata)  # keep the interpolated dataset
    return np.array(dataset)

def clcindex(data, path):
    indexpath = Writefile.readxml(path, 6)
    trafficindex = [np.max(data[i], axis=0) for i in range(56)]
    fname = ['%03d' % i for i in range(3, 169, 3)]  # 56 lead times, 003 h to 168 h
    filetime = Znwg.znwgtime()
    Writefile.write_to_csv(indexpath, trafficindex, 'trafficindex', fname, filetime)

def reverse(saltedata, dataset, snowdepth):
    """
    Generate snow-depth results from the trained models.
    :param saltedata: satellite data
    :param dataset: EC meteorological element data
    :param snowdepth: snow depth of the previous time step
    :return: [56, 901, 1401] gridded snow depth
    """
    tmp = [data.reshape(-1, 1) for data in dataset]  # reshape the base elements
    ele = np.concatenate(tmp, axis=1)
    ele.resize(56, 901 * 1401, 4)  # reshape so the previous-step snow depth can be fed in
    temp = np.nan_to_num(ele)
    snowdepth = snowdepth.reshape(-1, 1)  # snow depth of the previous time step only
    m1, m2, savepath, roadpath, indexpath, _ = Writefile.readxml(glovar.trafficpath, 0)
    if saltedata is not None:
        with open(m1, 'rb') as f:
            model1 = pickle.load(f)
        saltedata.resize(901 * 1401, 1)
        typecode = 1
    else:
        with open(m2, 'rb') as f:
            model2 = pickle.load(f)
        typecode = 2
    alldata = []
    for i in range(56):
        if typecode == 1:
            newdataset = np.concatenate([temp[i], snowdepth, saltedata], axis=1)
            prediction = np.array(model1.predict(newdataset))  # result of this round
        else:
            newdataset = np.concatenate([temp[i], snowdepth], axis=1)
            prediction = np.array(model2.predict(np.nan_to_num(newdataset)))  # result of this round
        predictions = np.nan_to_num(prediction)
        snowdepth = predictions[:, np.newaxis]  # feed the result into the next prediction
        # reshape instead of in-place resize: the snowdepth view above references
        # this array, and ndarray.resize refuses to act on referenced arrays
        predictions = predictions.reshape(len(glovar.lat), len(glovar.lon))
        sdgrid = np.nan_to_num(predictions)
        sdgrid[sdgrid < 0] = 0
        alldata.append(sdgrid)
    sp = r'/home/cqkj/QHTraffic/Data//'
    Writefile.write_to_nc(sp, np.array(alldata), glovar.lat, glovar.lon, 'SnowDepth',
                          glovar.fnames, glovar.filetime)
    return np.array(alldata)  # [56, 901, 1401] grid

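# The loop above is autoregressive: each step's prediction becomes the
# previous-snow-depth feature of the next step. A minimal sketch of that
# pattern in isolation, assuming numpy as np (StubModel, roll_forward and
# n_steps are illustrative names, not part of the pipeline):

class StubModel:
    def predict(self, X):
        return X[:, -1] * 0.9  # pretend snow decays by 10% per step

def roll_forward(model, features, depth0, n_steps=3):
    """Run n_steps predictions, feeding each output back as the depth feature."""
    depth = depth0.reshape(-1, 1)
    results = []
    for i in range(n_steps):
        X = np.concatenate([features[i], depth], axis=1)
        depth = model.predict(X).reshape(-1, 1)
        results.append(depth.copy())
    return results
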
def rainData():
    # Mirror the precipitation intelligent-grid (ZNWG) file and parse it
    now = datetime.datetime.now()
    *_, elements, ftp = Writefile.readxml(glovar.trafficpath, 1)
    element = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', now.strftime('%Y'), now.strftime('%Y%m%d'))
    grib.mirror(element[0], remote_url, element[1], ftp, element[2])
    rname = sorted(os.listdir(element[1]))[-1]
    rpath = element[1] + rname
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))  # data, lat, lon, size
    return [interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
            for data in dataset[:56]]

def main():
    dataset = iceData()
    ice = Roadic()
    icgrid = ice.icegrid(dataset, glovar.lat, glovar.lon)
    savepath, indexpath = Writefile.readxml(glovar.trafficpath, 4)[1:3]  # adjust the slice to match the config paths
    write(savepath, icgrid, 'IceDepth', glovar.lat, glovar.lon)  # save the thickness grid first
    iceroad = ice.depth2onezero(icgrid, glovar.lat, glovar.lon)
    # Read the CIMISS dataset
    cmissdata = np.loadtxt('/home/cqkj/project/industry/Product/Product/Source/cmsk.csv',
                           delimiter=',')
    icedays = RoadIceindex(cmissdata, iceroad)
    roadicing = icedays.iceday()
    res = roadicing.T
    write(indexpath, res, 'icingindex', type=1)

def main():
    ice = Roadic()
    dataset = iceData()
    icgrid = ice.icegrid(dataset, glovar.lat, glovar.lon)
    savepath, indexpath, _ = Writefile.readxml(glovar.trafficpath, 4)[2:]
    write(savepath, icgrid, 'IceDepth', glovar.lat, glovar.lon)  # save the thickness grid first
    iceroad = ice.depth2onezero(icgrid, glovar.lat, glovar.lon)
    # Read the CIMISS dataset; fetching and saving the live data is handled by another program
    cmissdata = np.loadtxt('/home/cqkj/project/Product/Product/source/cmsk.csv', delimiter=',')
    icedays = RoadIceindex(cmissdata, iceroad)
    roadicing = icedays.iceday()
    write(indexpath, roadicing, 'icingindex', type=1)

def mirrorGrib(path):  # test note: 6.14 / 6.16
    # Mirror the 08-20 / 20-08 live gridded analyses: temperature (Kelvin),
    # accumulated precipitation and humidity. The 20:00 run mirrors today's
    # 08:00 data; the 08:00 run mirrors yesterday's 20:00 data.
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(path, 0)  # freq should be None
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # three local directories
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # three remote paths
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        # Mirror into each directory; still to be tested. The window must fall in
        # 08-20, or 20:00 of the previous day to 08:00 of the current day.
        grid.mirror(element, remote_url, localdir, ftp)
    # List each directory; RAINs, RHs and TEMs are assumed to be the 08-20 file lists
    RAINs, RHs, TEMs = [sorted(os.listdir(localdir)) for localdir in localdirs]  # from midnight onward
    e2tTems = [tem for tem in TEMs if int(tem[-7:-5]) in range(8, 21)]
    e2tRains = [rain for rain in RAINs if int(rain[-7:-5]) in range(8, 21)]
    e2tRhs = [rh for rh in RHs if int(rh[-7:-5]) in range(8, 21)]
    # Assumed shape at a common resolution: [12, lat * lon]
    tem = [Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM))) for TEM in e2tTems]
    # Each entry holds (data, lat, lon, size) on the national grid; interpolate to Qinghai
    lat, lon = tem[0][1], tem[0][2]
    temdata = np.array([
        np.nan_to_num(interp.interpolateGridData(t[0] - 273.15, lat, lon, glovar.lat, glovar.lon))
        for t in tem
    ])
    raindata = np.array([
        np.nan_to_num(interp.interpolateGridData(
            Znwg.arrange(grid.readGrib(os.path.join(localdirs[0], RAIN)))[0],
            lat, lon, glovar.lat, glovar.lon))
        for RAIN in e2tRains
    ])
    rhdata = np.array([
        np.nan_to_num(interp.interpolateGridData(
            Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
            lat, lon, glovar.lat, glovar.lon))
        for RH in e2tRhs
    ])
    return temdata, raindata, rhdata

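# The three list comprehensions above repeat the same filename-hour filter.
# A small shared helper, assuming the hour always sits at name[-7:-5]
# (in_time_window is a hypothetical name, not part of the original code):

def in_time_window(fname, start=8, end=20):
    """True if the file's hour field falls inside [start, end]."""
    return start <= int(fname[-7:-5]) <= end

# Usage would then read, e.g.: e2tTems = [t for t in TEMs if in_time_window(t)]
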
def revise(message):
    # Correct the parsed result and interpolate to 1 km resolution
    mdpath, gcpath, savepath, *_ = Writefile.readxml(glovar.trafficpath, 1)
    net = torch.load(mdpath)
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.nan_to_num([np.array([data, dem]) for data in message])
    inputs = torch.from_numpy(arrays)
    with torch.no_grad():  # inference only; no gradients needed
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    outputs = np.nan_to_num(outputs)
    outputs[outputs < 0] = 0
    output = np.squeeze(outputs)
    Writefile.write_to_nc(savepath, output, glovar.lat, glovar.lon, 'rain',
                          glovar.fnames, glovar.filetime)
    return outputs

def windData(path):
    # Fetch the wind data
    *_, elements, ftp = Writefile.readxml(path, 2)
    element = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', glovar.now.strftime('%Y'),
                              glovar.now.strftime('%Y%m%d'))
    grib.mirror(element[0], remote_url, element[1], ftp, element[2])
    rname = sorted(os.listdir(element[1]))[-1]
    rpath = element[1] + rname
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))  # data, lat, lon, size
    # Return the interpolated data as a list
    return [interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
            for data in dataset]

def predepth():
    # Standing-water depth of the previous time step; needs testing and
    # optimisation on the server
    dr = np.zeros(shape=(801, 1381))  # for now the previous depth defaults to zero
    now = datetime.datetime.now()
    znwgtm = Znwg.znwgtime()
    *_, ftp = Writefile.readxml(glovar.trafficpath, 1)
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\ANALYSIS\\CMPA', now.strftime('%Y'), now.strftime('%Y%m%d'))
    localdir = r'/home/cqkj/QHTraffic/Product/Product/mirror/rainlive'
    grib.mirror('FRT_CHN_0P05_3HOR', remote_url, localdir, ftp)
    rname = sorted(os.listdir(localdir))[-1]
    rpath = os.path.join(localdir, rname)  # localdir lacks a trailing slash, so join explicitly
    data, lat, lon, _ = Znwg.arrange(grib.readGrib(rpath))
    data = interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
    dataset = data[np.newaxis, ]  # match the expected shape
    res = FloodModel.cal2(dataset, dr)
    return res[0]

def mirrorskgrib(path):  # to be tested June 15
    # Mirror the hourly live data (all three grids), lagged by 15 minutes;
    # the same config file can be reused
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(path, 0)  # freq should be None
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # three local directories
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # three remote paths
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        # Mirror into each directory; the window must fall in 08-20 or 20-08
        grid.mirror(element, remote_url, localdir, ftp)
    # Latest file in each directory; RAIN, RH, TEM are assumed to arrive in that order
    RAIN, RH, TEM = [sorted(os.listdir(localdir))[-1] for localdir in localdirs]
    tem = Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM)))
    lat, lon = tem[1], tem[2]
    temdata = np.array(np.nan_to_num(
        interp.interpolateGridData(tem[0] - 273.15, lat, lon, glovar.lat, glovar.lon)))
    raindata = np.array(np.nan_to_num(
        interp.interpolateGridData(
            Znwg.arrange(grid.readGrib(os.path.join(localdirs[0], RAIN)))[0],
            lat, lon, glovar.lat, glovar.lon)))
    rhdata = np.array(np.nan_to_num(
        interp.interpolateGridData(
            Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
            lat, lon, glovar.lat, glovar.lon)))
    Time = datetime.datetime.now().strftime('%Y%m%d%H')
    # str.join takes a single iterable, so wrap the path parts in a list
    savepath = ''.join([r'/home/cqkj/QHTraffic/tmp/ele', Time, r'.pkl'])
    # Store each hour's precipitation, humidity and temperature; the filename
    # carries the timestamp, down to the hour
    with open(savepath, 'wb') as f:
        pickle.dump([temdata, raindata, rhdata], f)
    return temdata, raindata, rhdata

def Weatherdata(path):
    # Mirror and parse the weather elements needed by the fire-risk model
    elements, subdirs, localdir, _, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
                   for subdir in subdirs]  # one remote path per element
    grib = Datainterface.GribData()
    for element, remote_url in zip(elements[:-1], remote_urls[:-1]):
        grib.mirror(element, remote_url, localdir, ftp, freq=freq)
    grib.mirror(elements[-1], remote_urls[-1], localdir, ftp, freq='24024')  # 24 h precipitation
    # Pick the newest file of each element by matching a pattern against the listing
    strings = ','.join(os.listdir(localdir))
    patterns = [r'(\w+.EDA.*?.GRB2)', r'(\w+.ERH.*?.GRB2)',
                r'(\w+.TMP.*?.GRB2)', r'(\w+.ER24.*?.GRB2)']
    allpath = [localdir + sorted(Znwg.regex(pattern, strings), key=str.lower)[-1]
               for pattern in patterns]
    ele14list = slice(1, 74, 8)  # select the ten daily 14:00 fields from the 3-hourly leads
    # Wind comes as interleaved U/V records
    wind = grib.readGrib(allpath[0])[0]
    windu_v = np.array([v for _, v in wind.items()])
    windu, windv = windu_v[::2][ele14list], windu_v[1::2][ele14list]
    data = np.array([Znwg.arrange(grib.readGrib(path))[0][ele14list]
                     for path in allpath[1:-1]])  # humidity and temperature
    er, lat, lon, size = Znwg.arrange(
        grib.readGrib(allpath[-1], nlat=glovar.latt, nlon=glovar.lonn))
    result = windu, windv, *data, er  # four met elements of shape [10, 181, 277], plus er on its own grid
    return result, lat, lon

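# ele14list picks every eighth 3-hourly record starting at index 1, i.e. one
# record per day, 24 h apart. A quick, illustrative check of the arithmetic:

assert list(range(74))[slice(1, 74, 8)] == [1, 9, 17, 25, 33, 41, 49, 57, 65, 73]  # 10 records
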
def revise(path, message):
    # Correct the parsed result and interpolate to 1 km resolution
    mdpath, _, gcpath, savepath, indexpath, *_ = Writefile.readxml(path, 2)
    data = [message[::2, :, :][:56], message[1::2, :, :][:56]]  # split out U wind and V wind
    net = torch.load(mdpath)
    net.eval()
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.nan_to_num([np.array([i, j, dem]) for i, j in zip(data[0], data[1])])
    inputs = torch.from_numpy(arrays)
    # torch.no_grad() only takes effect as a context manager, so wrap the forward passes
    with torch.no_grad():
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    output = np.squeeze(np.nan_to_num(outputs))
    datau, datav = output[:, 0], output[:, 1]
    Writefile.write_to_nc(savepath, datau, glovar.lat, glovar.lon, 'U',
                          glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, datav, glovar.lat, glovar.lon, 'V',
                          glovar.fnames, glovar.filetime)
    return indexpath, datau, datav

def revise(path, message):
    # Correct the parsed result and interpolate to 1 km resolution
    mdpath, _, gcpath, savepath, indexpath, *_ = Writefile.readxml(path, 2)
    data = np.array(message)
    data = [np.nan_to_num(data[::2, :, :][:56]),
            np.nan_to_num(data[1::2, :, :][:56])]  # U wind, V wind
    net = torch.load(mdpath)
    net.eval()
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.array([np.array([i, j, dem])
                       for i, j in zip(data[0][:, :801, :1381], data[1][:, :801, :1381])])
    inputs = torch.from_numpy(arrays)
    # torch.no_grad() only takes effect as a context manager, so wrap the forward passes
    with torch.no_grad():
        outputs = [np.nan_to_num(net(it[np.newaxis, :]).detach().numpy()) for it in inputs]
    datau, datav = np.squeeze(outputs)[:, 0, ...], np.squeeze(outputs)[:, 1, ...]  # unify layout
    # Source grid of the corrected field, interpolated onto the target grid
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    uwind = [np.nan_to_num(interp.interpolateGridData(u, lat, lon, glovar.lat, glovar.lon))
             for u in datau]
    vwind = [np.nan_to_num(interp.interpolateGridData(v, lat, lon, glovar.lat, glovar.lon))
             for v in datav]
    Writefile.write_to_nc(savepath, np.array(uwind), glovar.lat, glovar.lon, 'U',
                          glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, np.array(vwind), glovar.lat, glovar.lon, 'V',
                          glovar.fnames, glovar.filetime)
    return indexpath, datau, datav

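# Both revise() variants share the same inference recipe: eval mode plus a
# no-grad context, then a NumPy round-trip. A minimal, self-contained sketch of
# that recipe (SmallNet and _nograd_inference_demo are illustrative, not the
# production model):

import torch
import torch.nn as nn

class SmallNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 2, kernel_size=3, padding=1)

    def forward(self, x):
        return self.conv(x)

def _nograd_inference_demo():
    net = SmallNet()
    net.eval()  # disable dropout / batch-norm updates
    x = torch.zeros(1, 3, 8, 8)  # one sample, three channels (e.g. U, V, DEM)
    with torch.no_grad():  # no autograd bookkeeping during inference
        return net(x).numpy()  # -> (1, 2, 8, 8): two corrected output channels
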
def reverse(dataset):
    # Generate surface maximum/minimum temperature products; reworks the
    # existing model pipeline as a whole
    maxpath, minpath, *_ = Writefile.readxml(glovar.trafficpath, 3)
    with open(maxpath, 'rb') as f:
        maxmodel = pickle.load(f)
    with open(minpath, 'rb') as f:
        minmodel = pickle.load(f)
    temp = [data.reshape(-1, 1) for data in dataset]  # may be a large amount of data
    allele = np.concatenate(temp, axis=1)  # feature matrix for prediction
    maxvalue = maxmodel.predict(allele).reshape(56, 901, 1401)
    minvalue = minmodel.predict(allele).reshape(56, 901, 1401)
    savepath = r'/home/cqkj/QHtraffic/Data/skint//'
    Writefile.write_to_nc(savepath, maxvalue, glovar.lat, glovar.lon, 'maxskint',
                          glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, minvalue, glovar.lat, glovar.lon, 'minskint',
                          glovar.fnames, glovar.filetime)
    return maxvalue, minvalue  # surface maximum and minimum temperature

def main():
    snowpath, gpath, fpath, rainpath, fspath, gspath, mspath = Writefile.readxml(
        glovar.forestpath, 0)  # nc paths and model inputs from the config
    snow = snowdepth(snowpath)  # snow depth, [10, 801, 1381]
    data, *_ = Weatherdata(glovar.forestpath)  # weather element data
    ldtype = landtype(gpath, fpath)
    gindex, findex, mindex = firelevel(data, rainpath, snow, ldtype)  # fire-risk indices
    filetime = glovar.filetime
    fh = range(10)
    fnames = ['_%03d' % i for i in fh]
    Writefile.write_to_nc_fire(gspath, gindex, name='greenfire', lat=glovar.lat, lon=glovar.lon,
                               filetime=filetime, fnames=fnames, elename=None, nums=1)
    Writefile.write_to_nc_fire(fspath, findex, name='forestfire', filetime=filetime,
                               fnames=fnames, lat=glovar.lat, lon=glovar.lon,
                               elename=None, nums=1)
    Writefile.write_to_nc_fire(mspath, mindex, filetime=filetime, fnames=fnames,
                               lat=glovar.lat, lon=glovar.lon, name='meteorological',
                               elename='risk', nums=1)

def liverain(path, pklpath):
    # Mirror the live ZNWG/CMPA precipitation analysis and maintain a pickle cache
    elements, _, localdir, historydir, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    ytd = now - datetime.timedelta(days=1)
    dir = r'//ANALYSIS//CMPA//0P05'
    remote_url = os.path.join(dir, now.strftime('%Y'), now.strftime('%Y%m%d'))
    grb = Datainterface.GribData()
    grb.mirror('FAST_CHN_0P05_DAY-PRE', remote_url, localdir, ftp, freq=None)
    rainpaths = sorted(os.listdir(localdir))[-1]  # newest mirrored file
    os.chdir(localdir)
    rainlive, lat, lon, res = Znwg.arrange(
        grb.readGrib(rainpaths, nlat=glovar.latt, nlon=glovar.lonn))
    # Append the live field to the cached sequence (a deque-like rolling store)
    with open(pklpath, 'rb') as f:
        data = pickle.load(f)
    data.append(rainlive)
    with open(pklpath, 'wb') as f:
        pickle.dump(data, f)  # the original dumped only rainlive, discarding the history
    return rainlive

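# If the cache is meant to keep only the most recent N fields, a bounded
# collections.deque keeps the pickle from growing without limit. A minimal
# sketch, assuming N=8 retained fields (the bound and the update_cache name
# are assumptions, not from the original code):

import pickle
from collections import deque

def update_cache(pklpath, field, maxlen=8):
    """Append `field` to the pickled deque at pklpath, creating it if absent."""
    try:
        with open(pklpath, 'rb') as f:
            cache = pickle.load(f)
    except FileNotFoundError:
        cache = deque(maxlen=maxlen)
    cache.append(field)
    with open(pklpath, 'wb') as f:
        pickle.dump(cache, f)
    return cache
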
def __init__(self):
    self._path = glovar.trafficpath
    self.mpath, self.roadpath = Writefile.readxml(self._path, 1)[:2]

def __init__(self):
    self._path = glovar.trafficpath
    self.mpath, self.roadpath = Writefile.readxml(self._path, 1)[:2]
    self.dics = Writefile.readxml(self._path, 2)[0].split(',')