def revise(message):  # correct parsed results and interpolate to 1 km resolution
    """Correct precipitation fields with the trained network and interpolate to 1 km.

    :param message: array-like; each time slice is cropped to (801, 1381) and
        stacked with the DEM as a 2-channel network input
        (shape assumed from the slicing below -- TODO confirm with caller)
    :return: raw network outputs with NaNs zeroed and negatives clamped to 0
    """
    mdpath, gcpath, savepath, *_ = Writefile.readxml(glovar.trafficpath, 1)
    net = torch.load(mdpath)
    net.eval()  # inference mode, consistent with the other revise() variants
    dem = pd.read_csv(gcpath, index_col=0).values
    # Stack each cropped field with the DEM as a 2-channel input sample.
    arrays = np.array(
        [np.nan_to_num([data, dem]) for data in message[:, :801, :1381]])
    inputs = torch.from_numpy(arrays)
    # BUGFIX: the original left torch.no_grad() commented out; disable
    # gradient tracking for inference via the context manager.
    with torch.no_grad():
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    outputs = np.nan_to_num(outputs)
    outputs[outputs < 0] = 0  # precipitation cannot be negative
    output = np.squeeze(outputs)
    # Source grid of the network output; interpolate onto the 1 km target grid.
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    raingb = np.array([
        np.nan_to_num(
            interp.interpolateGridData(op, lat, lon, glovar.lat, glovar.lon))
        for op in output
    ])
    Writefile.write_to_nc(savepath, raingb, glovar.lat, glovar.lon, 'Rain',
                          glovar.fnames, glovar.filetime)
    return outputs
def write(path, data, name, lat=None, lon=None, type=0):
    """Persist *data* as NetCDF when type == 0, otherwise as CSV.

    Forecast-hour filename suffixes cover hours 3..168 in 3-hour steps.
    """
    reptime = ecmwf.ecreptime()
    suffixes = ['_%03d' % hour for hour in range(3, 169, 3)]
    if type != 0:
        Writefile.write_to_csv(path, data, name, suffixes, reptime)
        return
    Writefile.write_to_nc(path, data, lat, lon, name, suffixes, reptime)
def write(self, data, lat=None, lon=None, type=0):
    """Write snow output: NetCDF grid when type == 0, CSV index product otherwise."""
    reptime = ecmwf.ecreptime()
    suffixes = ['_%03d' % hour for hour in range(3, 169, 3)]
    if type == 0:
        Writefile.write_to_nc(self.savepath, data, lat, lon, 'Snow',
                              suffixes, reptime)
    else:
        Writefile.write_to_csv(self.indexpath, data, 'SnowIndex', suffixes,
                               reptime)
def reverse(saltedata, dataset, snowdepth):
    """
    Load a pickled model and generate snow-depth results step by step.

    :param saltedata: satellite data (or None; selects which model is used)
    :param dataset: EC meteorological element data
    :param snowdepth: snow depth for the previous time step only; the loop
        feeds each prediction back in as the next step's snow-depth input
    :return: grid data of shape [56, 901, 1401]
    """
    tmp = [data.reshape(-1, 1) for data in dataset]  # flatten base elements to columns
    ele = np.concatenate(tmp, axis=1)
    ele.resize(56, 901 * 1401, 4)  # reshape so the previous-step snow can be appended per step
    temp = np.nan_to_num(ele)
    snowdepth = snowdepth.reshape(-1, 1)  # snow depth, previous time step only
    m1, m2, savepath, roadpath, indexpath, _ = Writefile.readxml(
        glovar.trafficpath, 0)
    # m2 = r'/home/cqkj/LZD/Product/Product/Source/snow.pickle'
    if saltedata is not None:
        # Satellite data available: use model1, which takes it as a feature.
        with open(m1, 'rb') as f:
            model1 = pickle.load(f)
        #########################################
        saltedata.resize(901 * 1401, 1)
        typecode = 1
    else:
        # No satellite data: fall back to model2 (base elements + snow only).
        with open(m2, 'rb') as f:
            model2 = pickle.load(f)
        typecode = 2
    alldata = []
    ################################################
    for i in range(56):
        # temp = [data.reshape(-1, 1) for data in dataset[i]]  # base elements only
        # newdataset = np.concatenate([temp, snowdepth, saltedata], axis=1)
        if typecode == 1:
            newdataset = np.concatenate([temp[i], snowdepth, saltedata],
                                        axis=1)
            prediction = np.array(model1.predict(newdataset))  # result for this round
        if typecode == 2:
            #print(presnow.shape)
            # NOTE: forecast here may show blocky artifacts when rendered as an image
            newdataset = np.concatenate([temp[i], snowdepth], axis=1)
            prediction = np.array(model2.predict(
                np.nan_to_num(newdataset)))  # result for this round
            # predictions = np.nan_to_num(model2.predict(np.nan_to_num(newdataset)))
        predictions = np.nan_to_num(prediction)
        # predictions[predictions < 0] = 0
        print(predictions.shape)
        snowdepth = predictions[:, np.newaxis]  # feed result in as next step's input
        # NOTE(review): ndarray.resize() after taking the view above may raise
        # ValueError (resize refuses arrays referenced by another array) -- confirm.
        predictions.resize(len(glovar.lat), len(glovar.lon))
        sdgrid = np.nan_to_num(predictions)
        sdgrid[sdgrid < 0] = 0
        alldata.append(sdgrid)
    sp = r'/home/cqkj/QHTraffic/Data//'
    Writefile.write_to_nc(sp, np.array(alldata), glovar.lat, glovar.lon,
                          'SnowDepth', glovar.fnames, glovar.filetime)
    return np.array(alldata)  # returns [56, 901, 1401] grid data
def clcRoadic(snowdepth, skint, rain):
    """Compute road-ice thickness from snow depth, skin temperature and rain.

    :param snowdepth: gridded snow depth; flattened to one feature column
    :param skint: gridded skin temperature; flattened to one feature column
    :param rain: gridded precipitation; flattened to one feature column
    :return: ice-depth grid of shape (1, len(glovar.lat), len(glovar.lon))
    """
    now = datetime.datetime.now().strftime('%Y%m%d%H')  # #########
    # NOTE(review): model path is still a placeholder -- fill in the real
    # pickle path before deployment.
    with open(r'道路结冰模型', 'rb') as f:
        model = pickle.load(f)
    # BUGFIX: reshape is a method call, not a subscript ('reshape[-1, 1]'
    # raised TypeError). Build a single (N, 3) feature matrix, consistent
    # with the other predict() call sites in this module.
    features = np.concatenate([
        snowdepth.reshape(-1, 1),
        skint.reshape(-1, 1),
        rain.reshape(-1, 1)
    ], axis=1)
    Roadic = model.predict(features)
    # BUGFIX: resize expects integer dimensions, not the coordinate arrays.
    Roadic.resize(1, len(glovar.lat), len(glovar.lon))
    path = r'填入网格文件的保存路径'  # placeholder save path -- TODO configure
    Writefile.write_to_nc(path, Roadic, glovar.lat, glovar.lon, 'iceDepth',
                          '', now)
    return Roadic
def revise(message):  # correct parsed results and interpolate to 1 km resolution
    """Run the correction network over each field in *message* and save as NetCDF.

    Each field is stacked with the DEM as a 2-channel input; NaNs are zeroed
    and negative outputs are clamped to 0 before writing.
    """
    mdpath, gcpath, savepath, *_ = Writefile.readxml(glovar.trafficpath, 1)
    model = torch.load(mdpath)
    elevation = pd.read_csv(gcpath, index_col=0).values
    stacked = np.nan_to_num(
        [np.array([field, elevation]) for field in message])
    batch = torch.from_numpy(stacked)
    # torch.no_grad()
    results = []
    for sample in batch:
        results.append(model(sample[np.newaxis, :]).detach().numpy())
    results = np.nan_to_num(results)
    results[results < 0] = 0  # rainfall cannot be negative
    print(results.shape)
    Writefile.write_to_nc(savepath, np.squeeze(results), glovar.lat,
                          glovar.lon, 'rain', glovar.fnames, glovar.filetime)
    return results
def revise(path, message):  # correct parsed results and interpolate to 1 km resolution
    """Correct U/V wind fields with the trained network and write them to NetCDF.

    :param path: config XML path passed to Writefile.readxml
    :param message: interleaved U/V fields along axis 0 (even indices = U,
        odd indices = V); the first 56 of each component are used
    :return: (indexpath, corrected U fields, corrected V fields)
    """
    mdpath, _, gcpath, savepath, indexpath, *_ = Writefile.readxml(path, 2)
    # De-interleave: even slices are U wind, odd slices are V wind.
    data = [message[::2, :, :][:56], message[1::2, :, :][:56]]
    # data = [data[::2, :, :][:56], data[1::2, :, :][:56]]  # take out U wind and V wind separately
    net = torch.load(mdpath)
    net.eval()
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.nan_to_num(
        [np.array([i, j, dem]) for i, j in zip(data[0], data[1])])
    inputs = torch.from_numpy(arrays)
    # BUGFIX: a bare torch.no_grad() call has no effect; it must be used as a
    # context manager to actually disable gradient tracking.
    with torch.no_grad():
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    output = np.squeeze(np.nan_to_num(outputs))
    datau, datav = output[:, 0], output[:, 1]
    Writefile.write_to_nc(savepath, datau, glovar.lat, glovar.lon, 'U',
                          glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, datav, glovar.lat, glovar.lon, 'V',
                          glovar.fnames, glovar.filetime)
    return indexpath, datau, datav
def revise(path, message):  # correct parsed results and interpolate to 1 km resolution
    """Correct U/V wind fields, interpolate onto the 1 km grid, write NetCDF.

    :param path: config XML path passed to Writefile.readxml
    :param message: interleaved U/V fields along axis 0 (even = U, odd = V);
        the first 56 of each are cropped to (801, 1381) before inference
    :return: (indexpath, corrected U on model grid, corrected V on model grid)
    """
    mdpath, _, gcpath, savepath, indexpath, *_ = Writefile.readxml(path, 2)
    data = np.array(message)
    # De-interleave U (even indices) and V (odd indices) wind components.
    data = [
        np.nan_to_num(data[::2, :, :][:56]),
        np.nan_to_num(data[1::2, :, :][:56])
    ]
    net = torch.load(mdpath)
    net.eval()
    dem = pd.read_csv(gcpath, index_col=0).values
    arrays = np.array([
        np.array([i, j, dem])
        for i, j in zip(data[0][:, :801, :1381], data[1][:, :801, :1381])
    ])
    inputs = torch.from_numpy(arrays)
    # BUGFIX: a bare torch.no_grad() call has no effect; it must be used as a
    # context manager to actually disable gradient tracking.
    with torch.no_grad():
        outputs = [
            np.nan_to_num(net(it[np.newaxis, :]).detach().numpy())
            for it in inputs
        ]
    squeezed = np.squeeze(outputs)  # hoisted: was computed twice
    datau, datav = squeezed[:, 0, ...], squeezed[:, 1, ...]  # unify format
    # Source grid of the network output; interpolate onto the 1 km target grid.
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    uwind = [
        np.nan_to_num(
            interp.interpolateGridData(u, lat, lon, glovar.lat, glovar.lon))
        for u in datau
    ]
    vwind = [
        np.nan_to_num(
            interp.interpolateGridData(v, lat, lon, glovar.lat, glovar.lon))
        for v in datav
    ]
    Writefile.write_to_nc(savepath, np.array(uwind), glovar.lat, glovar.lon,
                          'U', glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, np.array(vwind), glovar.lat, glovar.lon,
                          'V', glovar.fnames, glovar.filetime)
    return indexpath, datau, datav
def reverse(dataset):
    """Predict max/min surface (skin) temperature grids and write them to NetCDF.

    :param dataset: iterable of feature grids; each is flattened into one
        column of the prediction feature matrix
    :return: (maxvalue, minvalue), each reshaped to (56, 901, 1401)
    """
    maxpath, minpath, *_ = Writefile.readxml(glovar.trafficpath, 3)
    models = []
    for model_path in (maxpath, minpath):
        with open(model_path, 'rb') as fh:
            models.append(pickle.load(fh))
    maxmodel, minmodel = models
    # dataset.resize(56, 801 * 1381, 4)
    ################################################
    columns = [grid.reshape(-1, 1) for grid in dataset]  # data may be large
    features = np.concatenate(columns, axis=1)  # training elements
    maxvalue = maxmodel.predict(features).reshape(56, 901, 1401)
    minvalue = minmodel.predict(features).reshape(56, 901, 1401)
    savepath = r'/home/cqkj/QHtraffic/Data/skint//'
    for grid, varname in ((maxvalue, 'maxskint'), (minvalue, 'minskint')):
        Writefile.write_to_nc(savepath, grid, glovar.lat, glovar.lon, varname,
                              glovar.fnames, glovar.filetime)
    return maxvalue, minvalue  # return surface max / min temperature