Exemplo n.º 1
0
def press(savepath, datau, datav):
    """Compute wind-pressure hazard indices at road points and write a CSV.

    savepath: output directory for the 'windindex' CSV product.
    datau / datav: iterables of gridded U/V wind fields, one per forecast time.
    """
    # Road-point metadata: coordinates plus a road-direction vector (ur, vr).
    mes = pd.read_csv(glovar.windpath, index_col=0)
    lon, lat = mes['Lon'], mes['Lat']  # road point coordinates
    # Normalise the direction vector stored in the last two columns.
    length = np.linalg.norm(mes.iloc[:, -2:], axis=1)
    ur = np.divide(mes['ur'], length)
    vr = np.divide(mes['vr'], length)
    uv = [[i, j] for i, j in zip(ur, vr)]
    latt, lonn = np.linspace(31.4, 39.4, 801), np.linspace(89.3, 103.1, 1381)
    # BUG FIX: windcross used to be re-created inside the loop, so only the
    # results of the LAST forecast time survived to the write below.
    windcross = []
    for u, v in zip(datau, datav):
        # Sample the gridded wind at the road points (isGrid=False).
        uvalue = interp.interpolateGridData(u, latt, lonn, lat, lon,
                                            isGrid=False)
        vvalue = interp.interpolateGridData(v, latt, lonn, lat, lon,
                                            isGrid=False)
        UVvalue = [[i, j] for i, j in zip(uvalue, vvalue)]
        for value in UVvalue:
            # Cross-wind component relative to the road direction vectors.
            datas = np.cross(uv, np.array(value).T)
            # Wind pressure w = 1/2 * rho * v^2 with rho = 1.29, scaled x1000.
            w = 1 / 2 * 1.29 * (np.square(datas)) * 1000
            # Map pressure onto a 1-5 hazard index.
            w = np.piecewise(w, [
                w < 83, (w >= 83) & (w < 134), (w >= 134) & (w < 602),
                (w >= 602) & (w < 920), w >= 920
            ], [1, 2, 3, 4, 5])
            windcross.append(w[np.newaxis, :])
    Writefile.write_to_csv(savepath, windcross, 'windindex', glovar.fnames,
                           glovar.filetime)
Exemplo n.º 2
0
def main():
    """Build the snow-depth product: fetch, interpolate, compute, write."""
    # NOTE(review): the original rebound the *function* name `saltedata` to
    # its own result; use a distinct local name instead.
    salte = saltedata(path)  # assumes module-level `path` — TODO confirm
    snowpre = np.random.randint(0, 1, size=(801 * 1381, 1))
    snow = SnowDepth()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    # Target-grid bounds and resolution from the config file.
    region = [
        float(i) for i in ','.join(
            Writefile.readxml(
                r'/home/cqkj/QHTraffic/Product/Traffic/SNOD/config.xml',
                0)).split(',')
    ]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Fetch each element and interpolate it to the target grid.
    for dic in snow.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            # An all-NaN slice borrows half of the next forecast hour.
            if np.isnan(data[i]).all() and i + 1 < data.shape[0]:
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            # BUG FIX: the interpolated result used to be discarded; keep it
            # so the dataset matches the (new_lat, new_lon) grid it is used
            # on downstream (same pattern as iceData()).
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, new_lat,
                                           new_lon))
        dataset.append(np.array(newdata))  # interpolated data set
    depthgrid = snow.clcsd(dataset, new_lat, new_lon, salte, snowpre)
    snow.write(depthgrid, new_lat, new_lon)
    dangerindex = snow.clcindex(depthgrid, new_lat, new_lon)
    snow.write(dangerindex, type=1)
Exemplo n.º 3
0
def main():
    """Build the road-icing product: fetch, interpolate, compute, write."""
    ice = Roadic()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    # Target-grid bounds and resolution from the traffic config.
    region = [float(i) for i in ','.join(Writefile.readxml(glovar.trafficpath, 0)).split(',')]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Fetch each element and interpolate it to the target grid.
    for dic in ice.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            # An all-NaN slice borrows half of the next forecast hour.
            if np.isnan(data[i]).all() and i + 1 < data.shape[0]:
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            # BUG FIX: keep the interpolated slice (it used to be thrown
            # away) so icegrid() sees data on (new_lat, new_lon).
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, new_lat,
                                           new_lon))
        dataset.append(np.array(newdata))  # interpolated data set
    icgrid = ice.icegrid(dataset, new_lat, new_lon)
    savepath, indexpath = Writefile.readxml(glovar.trafficpath, 1)[2:]
    write(savepath, icgrid, 'Roadic', new_lat, new_lon)  # thickness grid first
    iceroad = ice.depth2onezero(icgrid, new_lat, new_lon)
    # CIMISS observations are fetched/saved by a separate program; just load.
    cmissdata = np.loadtxt('/home/cqkj/QHTraffic/qhroadic/cmsk.csv', delimiter=',')
    icedays = RoadIceindex(cmissdata, iceroad)
    roadicing = icedays.iceday()
    write(indexpath, roadicing, 'RoadicIndex', type=1)
Exemplo n.º 4
0
def iceData():
    """Fetch EC forecast elements (T, rain, ground T, RH, snow depth) and
    interpolate every forecast slice onto the glovar grid.

    Returns an array of shape [n_elements, n_times, ...] on the glovar grid.
    """
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run carries today's 08:00 EC forecast
    *_, dics = Writefile.readxml(glovar.trafficpath, 4)
    dicslist = dics.split(',')
    lonlatset, dataset = [], []
    for dic in dicslist:
        newdata = []
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        # BUG FIX: iterate every slice — range(shape[0] - 1) silently dropped
        # the last forecast hour; the old `i + 1 <= shape[0]` guard was
        # always true (off by one), so bound the NaN-fill branch properly.
        for i in range(data.shape[0]):
            if np.isnan(data[i]).all() and i + 1 < data.shape[0]:
                # Fill an all-NaN slice by splitting the next hour in half.
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                           glovar.lon))
        newdata = np.array(newdata)
        # newdata[newdata<0] = 0                    # optional sanity clamp
        dataset.append(newdata)  # interpolated data set
    return np.array(dataset)
Exemplo n.º 5
0
def mirrorGrib(path):
    """Mirror the 08-20 / 20-08 real-time gridded fields and interpolate them.

    Syncs temperature (Kelvin), accumulated rain and humidity grids; the
    20:00 run syncs today's 08:00 data and the 08:00 run yesterday's 20:00.
    Returns (temdata, raindata, rhdata) interpolated onto the glovar grid.
    """
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(
        path, 0)  # freq is expected to be None
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # the three local mirror directories
    remote_urls = [
        os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
        for subdir in subdirs
    ]  # one remote path per element
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        grid.mirror(element, remote_url, localdir,
                    ftp)  # sync each dir; window must stay within 08-20 / 20-08
    # Directory listings since midnight; the unpacking assumes the config
    # order of localdirs is RAIN, RH, TEM — TODO confirm against config.xml.
    RAINs, RHs, TEMs = [
        sorted(os.listdir(localdir)) for localdir in localdirs
    ]  # files from midnight until now
    # Keep only files whose hour field (filename chars [-7:-5]) is 08..20.
    e2tTems = [tem for tem in TEMs if int(tem[-7:-5]) in range(8, 21)]
    e2tRains = [rain for rain in RAINs if int(rain[-7:-5]) in range(8, 21)]
    e2tRhs = [rh for rh in RHs if int(rh[-7:-5]) in range(8, 21)]
    # Each arranged product is (data, lat, lon, size) on the national grid
    # and must be interpolated down to the Qinghai (glovar) grid.
    tem = [
        Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM)))
        for TEM in e2tTems
    ]
    lat, lon = tem[0][1], tem[0][2]
    # Convert Kelvin -> Celsius, then interpolate every hourly slice.
    temdata = np.array([
        np.nan_to_num(
            interp.interpolateGridData(t[0] - 273.15, lat, lon, glovar.lat,
                                       glovar.lon)) for t in tem
    ])
    raindata = np.array([
        np.nan_to_num(
            interp.interpolateGridData(
                Znwg.arrange(grid.readGrib(os.path.join(localdirs[0],
                                                        RAIN)))[0], lat, lon,
                glovar.lat, glovar.lon)) for RAIN in e2tRains
    ])
    rhdata = np.array([
        np.nan_to_num(
            interp.interpolateGridData(
                Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
                lat, lon, glovar.lat, glovar.lon)) for RH in e2tRhs
    ])
    return temdata, raindata, rhdata
Exemplo n.º 6
0
def clcRoadic(snowdepth, skint, rain, station, modelpath, savepath):
    """Predict road-ice thickness and attach it to the station table.

    snowdepth / skint / rain: gridded inputs, flattened into model features.
    station: DataFrame with `lat`/`lon` columns for point sampling.
    modelpath: pickled regression model path (savepath currently unused —
    the NetCDF write below is commented out pending testing).
    Returns the station DataFrame with 'snow' and 'Time' columns appended.
    """
    # NOTE(review): unpickling a model file — only load trusted paths.
    with open(modelpath, 'rb') as f:
        model = pickle.load(f)

    # Feature matrix: one row per grid cell, columns [snow, skin T, rain].
    ele = np.concatenate(
        [snowdepth.reshape(-1, 1),
         skint.reshape(-1, 1),
         rain.reshape(-1, 1)],
        axis=1)
    Roadic = model.predict(ele)
    Roadic.resize(1, 901, 1401)
    # Grid write disabled pending testing:
    # Writefile.write_to_nc(savepath, Roadic, glovar.lat, glovar.lon,
    #                       'iceDepth', '', now)

    # Sample the ice-thickness grid at the stations.
    stationSnow = np.nan_to_num(
        interp.interpolateGridData(Roadic,
                                   glovar.lat,
                                   glovar.lon,
                                   station.lat.values,
                                   station.lon.values,
                                   isGrid=False))
    # Attach a shared timestamp column and merge with the station metadata.
    time = datetime.datetime.now().strftime('%Y%m%d%H')
    alltime = np.repeat(time, len(stationSnow))
    df = pd.DataFrame(stationSnow[:, np.newaxis], columns=['snow'])
    Time = pd.DataFrame(alltime[:, np.newaxis], columns=['Time'])
    dataframe = pd.concat([station, df, Time], axis=1)

    return dataframe
Exemplo n.º 7
0
def presnow():
    """Return the previous-time snow depth interpolated to the glovar grid."""
    rep_time = ecmwf.ecreptime()
    # Forecast hour 0 of the EC high-res snow-depth product.
    lon, lat, depth = Datainterface.micapsdata(rep_time, 'ECMWF_HR/SNOD', [0])
    return interp.interpolateGridData(depth, lat, lon, glovar.lat, glovar.lon)
Exemplo n.º 8
0
def mirrorSkint(hours):
    """Fetch station ground temperatures (GST) from CIMISS and grid each hour.

    hours: length of the query window, ending 8 hours before now.
    Returns a list of NaN-free grids, one per distinct observation time.
    """
    interfaceId = 'getSurfEleInRegionByTimeRange'
    elements = "Station_Id_C,Lat,Lon,GST,Year,Mon,Day,Hour"
    # Scheduled for 08:00 / 20:00 runs; the -8 h shift is presumably a
    # local-time -> UTC conversion — TODO confirm.  Microseconds are not
    # zeroed here, but strftime below truncates them anyway.
    lastime = datetime.datetime.now().replace(
        minute=0, second=0) - datetime.timedelta(hours=8)
    firstime = lastime - datetime.timedelta(hours=hours)
    # Half-open CIMISS time range "[first,last)".
    temp = ('[', firstime.strftime('%Y%m%d%H%M%S'), ',',
            lastime.strftime('%Y%m%d%H%M%S'), ')')
    Time = ''.join(temp)
    params = {
        'dataCode': "SURF_CHN_MUL_HOR",  # hourly surface observations
        'elements': elements,
        'timeRange': Time,
        'adminCodes': "630000"  # region code (Qinghai per project paths) — confirm
    }
    initData = Datainterface.cimissdata(interfaceId, elements,
                                        **params)  # raw CIMISS records
    initData.Time = pd.to_datetime(initData.Time)
    # Split the records by observation time and grid each group.
    timeList = pd.to_datetime(initData.Time.unique()).strftime(
        '%Y%m%d%H%M%S')  # NOTE(review): relies on unique() ordering — confirm
    initData.set_index(initData.Time, inplace=True)
    oneHourgrid = [
        np.nan_to_num(
            interp.interpolateGridData(
                initData.loc[tm].GST.values.astype('float32'),
                initData.loc[tm].Lat.values.astype('float32'),
                initData.loc[tm].Lon.values.astype('float32'), glovar.lat,
                glovar.lon)) for tm in timeList
    ]
    return oneHourgrid  # expected shape: [hours, newlat, newlon]
Exemplo n.º 9
0
def revise(message):
    """Correct parsed rain grids with the trained net and regrid to 1 km.

    message: array-like of rain fields; only the first 801x1381 window is
    used. Writes the corrected grids to NetCDF and returns the raw net
    outputs.
    """
    mdpath, gcpath, savepath, *_ = Writefile.readxml(glovar.trafficpath, 1)
    net = torch.load(mdpath)
    dem = pd.read_csv(gcpath, index_col=0).values
    # Two-channel input per time step: the rain field plus the DEM.
    arrays = np.array(
        [np.nan_to_num([data, dem]) for data in message[:, :801, :1381]])
    inputs = torch.from_numpy(arrays)
    # BUG FIX: torch.no_grad() was commented out / never applied; run
    # inference inside the context manager to skip autograd bookkeeping.
    with torch.no_grad():
        outputs = [net(it[np.newaxis, :]).detach().numpy() for it in inputs]
    outputs = np.nan_to_num(outputs)
    outputs[outputs < 0] = 0  # rain cannot be negative
    output = np.squeeze(outputs)
    # Source grid of the model output; target is the 1 km glovar grid.
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    raingb = np.array([
        np.nan_to_num(
            interp.interpolateGridData(op, lat, lon, glovar.lat, glovar.lon))
        for op in output
    ])

    Writefile.write_to_nc(savepath, raingb, glovar.lat, glovar.lon, 'Rain',
                          glovar.fnames, glovar.filetime)
    return outputs
Exemplo n.º 10
0
def write_to_nc(path, data, name, fnames, filetime, de):
    """Write each forecast slice to NetCDF; sample the first 4 at stations.

    path / fnames / filetime: components of the output file names.
    data: gridded fields, one per forecast step, on the glovar grid.
    de: DataFrame of verification stations (must carry Lat / Lon columns).
    """
    cirnum = data.shape[0]
    for i in range(cirnum):
        if i < 4:
            # Sample the grid at the verification stations.
            tmp = np.nan_to_num(
                interp.interpolateGridData(data[i],
                                           glovar.lat,
                                           glovar.lon,
                                           de.Lat,
                                           de.Lon,
                                           isGrid=None))
            time = (datetime.datetime.now() +
                    datetime.timedelta(hours=i * 3)).strftime(
                        '%Y%m%d%H')  # TODO: use the grid's own valid time
            alltime = np.repeat(time, len(tmp))
            df = pd.DataFrame(tmp[:, np.newaxis], columns=[name])
            Time = pd.DataFrame(alltime[:, np.newaxis], columns=['Time'])
            de.reset_index(inplace=True)
            dataframe = pd.concat([de, df, Time], axis=1)
            # TODO: insert `dataframe` into the database
        # BUG FIX: the original used `de.datetime.strptime` (the station
        # DataFrame has no .datetime) and bare `lat`/`lon` names that are
        # undefined here; use the datetime module and the glovar grid.
        ds = xr.Dataset(
            {name: (['time', 'lat', 'lon'], data[i][np.newaxis, :])},
            coords={
                'time': [datetime.datetime.strptime(filetime, '%Y%m%d%H%M')],
                'lat': glovar.lat,
                'lon': glovar.lon
            })
        f = ''.join([path, filetime, '.', fnames[i], '.', name, '.nc'])
        ds.to_netcdf(f, format='NETCDF3_CLASSIC')
Exemplo n.º 11
0
def mirrorskgrib(path):
    """Mirror the latest real-time T/rain/RH grids, interpolate and cache.

    Returns (temdata, raindata, rhdata) on the glovar grid and pickles the
    triple to /home/cqkj/QHTraffic/tmp/ele<YYYYMMDDHH>.pkl (hourly cache).
    """
    grid = Datainterface.GribData()
    now = datetime.datetime.now()
    elements, subdirs, localdirs, _, freq, *ftp = Writefile.readxml(
        path, 0)  # freq is expected to be None
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    localdirs = localdirs.split(',')  # the three local mirror directories
    remote_urls = [
        os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
        for subdir in subdirs
    ]  # one remote path per element
    for localdir, element, remote_url in zip(localdirs, elements, remote_urls):
        grid.mirror(element, remote_url, localdir,
                    ftp)  # sync each dir; window must stay within 08-20 / 20-08
    # Newest file in each directory; order assumed RAIN, RH, TEM — confirm
    # against the localdirs config order.
    RAIN, RH, TEM = [
        sorted(os.listdir(localdir))[-1] for localdir in localdirs
    ]
    tem = Znwg.arrange(grid.readGrib(os.path.join(localdirs[2], TEM)))
    lat, lon = tem[1], tem[2]
    temdata = np.array(
        np.nan_to_num(
            interp.interpolateGridData(tem[0] - 273.15, lat, lon, glovar.lat,
                                       glovar.lon)))  # Kelvin -> Celsius
    raindata = np.array(
        np.nan_to_num(
            interp.interpolateGridData(
                Znwg.arrange(grid.readGrib(os.path.join(localdirs[0],
                                                        RAIN)))[0], lat, lon,
                glovar.lat, glovar.lon)))
    rhdata = np.array(
        np.nan_to_num(
            interp.interpolateGridData(
                Znwg.arrange(grid.readGrib(os.path.join(localdirs[1], RH)))[0],
                lat, lon, glovar.lat, glovar.lon)))
    Time = datetime.datetime.now().strftime('%Y%m%d%H')
    # BUG FIX: str.join takes a single iterable; passing three positional
    # arguments raised TypeError at runtime.
    savepath = ''.join([r'/home/cqkj/QHTraffic/tmp/ele', Time, r'.pkl'])
    # Cache this hour's temperature / rain / humidity triple.
    with open(savepath, 'wb') as f:
        pickle.dump([temdata, raindata, rhdata], f)
    return temdata, raindata, rhdata
    '''
Exemplo n.º 12
0
 def depth2onezero(self, icegrid, lat, lon):
     """Threshold the ice grid to 0/1 and sample it at the road stations.

     icegrid: per-hour ice-thickness grids; cells > 0.05 count as icing.
     Returns an array of shape [n_stations, 56] — assumes 56 forecast
     hours, TODO confirm.
     """
     # BUG FIX: pandas' keyword is `index_col`, not `indexcol` (TypeError).
     roadmes = pd.read_csv(self.roadpath, index_col=0)
     station_lat, station_lon = roadmes.Lat, roadmes.Lon
     icegrid = np.where(icegrid > 0.05, 1, 0)  # binarise: icing / no icing
     iceindex = []
     for i in range(56):
         iceindex.append(
             interp.interpolateGridData(icegrid[i], lat, lon, station_lat,
                                        station_lon,
                                        isGrid=False)[:, np.newaxis])
     iceroad = np.concatenate(iceindex, axis=1)
     return iceroad
Exemplo n.º 13
0
 def depth2onezero(self, icegrid, lat, lon):
     """Binarise the ice grid (> 0.05 -> 1) and sample it at road stations."""
     with open(self.roadpath, 'rb') as f:
         roadmes = pickle.load(f)
     station_lat, station_lon = roadmes.lat, roadmes.lon
     binary = np.where(icegrid > 0.05, 1, 0)
     # One station sample per forecast hour, stacked column-wise.
     samples = [
         interp.interpolateGridData(binary[hour], lat, lon, station_lat,
                                    station_lon, isGrid=False)
         for hour in range(56)
     ]
     return np.concatenate(samples, axis=1)
Exemplo n.º 14
0
def revise(path, message):
    """Correct the parsed U/V wind with the trained net and regrid to 1 km.

    path: config XML path; message: interleaved U/V grids.
    Writes 'U' and 'V' NetCDF products and returns (indexpath, datau, datav).
    """
    mdpath, _, gcpath, savepath, indexpath, *_ = Writefile.readxml(path, 2)
    data = np.array(message)
    # De-interleave U and V and keep the first 56 forecast hours.
    data = [
        np.nan_to_num(data[::2, :, :][:56]),
        np.nan_to_num(data[1::2, :, :][:56])
    ]
    net = torch.load(mdpath)
    net.eval()
    dem = pd.read_csv(gcpath, index_col=0).values
    # Three-channel input per time step: U, V and the DEM.
    arrays = np.array([
        np.array([i, j, dem])
        for i, j in zip(data[0][:, :801, :1381], data[1][:, :801, :1381])
    ])
    inputs = torch.from_numpy(arrays)
    # BUG FIX: a bare `torch.no_grad()` statement is a no-op; it must be
    # used as a context manager to disable autograd during inference.
    with torch.no_grad():
        outputs = [
            np.nan_to_num(net(it[np.newaxis, :]).detach().numpy())
            for it in inputs
        ]
    datau = np.squeeze(outputs)[:, 0, ...]
    datav = np.squeeze(outputs)[:, 1, ...]
    # Source grid of the model output; target is the 1 km glovar grid.
    lat = np.linspace(31.4, 39.4, 801)
    lon = np.linspace(89.3, 103.1, 1381)
    uwind = [
        np.nan_to_num(
            interp.interpolateGridData(u, lat, lon, glovar.lat, glovar.lon))
        for u in datau
    ]
    vwind = [
        np.nan_to_num(
            interp.interpolateGridData(v, lat, lon, glovar.lat, glovar.lon))
        for v in datav
    ]

    Writefile.write_to_nc(savepath, np.array(uwind), glovar.lat, glovar.lon,
                          'U', glovar.fnames, glovar.filetime)
    Writefile.write_to_nc(savepath, np.array(vwind), glovar.lat, glovar.lon,
                          'V', glovar.fnames, glovar.filetime)
    return indexpath, datau, datav
Exemplo n.º 15
0
def rainData():
    """Mirror the latest rain smart-grid file and parse it onto the glovar grid."""
    now = datetime.datetime.now()
    *_, elements, ftp = Writefile.readxml(glovar.trafficpath, 1)
    cfg = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', now.strftime('%Y'),
                              now.strftime('%Y%m%d'))
    grib.mirror(cfg[0], remote_url, cfg[1], ftp, cfg[2])
    # Newest mirrored file in the local directory.
    newest = sorted(os.listdir(cfg[1]))[-1]
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(cfg[1] + newest))
    # First 56 forecast hours, interpolated to the common grid.
    return [
        interp.interpolateGridData(field, lat, lon, glovar.lat, glovar.lon)
        for field in dataset[:56]
    ]
Exemplo n.º 16
0
def predepth():
    """Compute the previous-time ponding depth from the latest live rain grid."""
    dr = np.zeros(shape=(801, 1381))  # assume zero previous ponding depth
    now = datetime.datetime.now()
    znwgtm = Znwg.znwgtime()
    *_, ftp = Writefile.readxml(glovar.trafficpath, 1)
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\ANALYSIS\\CMPA', now.strftime('%Y'),
                              now.strftime('%Y%m%d'))
    localdir = r'/home/cqkj/QHTraffic/Product/Product/mirror/rainlive'
    grib.mirror('FRT_CHN_0P05_3HOR', remote_url, localdir, ftp)
    rname = sorted(os.listdir(localdir))[-1]
    # BUG FIX: `localdir + rname` concatenated without a path separator
    # (localdir has no trailing slash); join the path properly.
    rpath = os.path.join(localdir, rname)
    data, lat, lon, _ = Znwg.arrange((grib.readGrib(rpath)))
    data = interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
    dataset = data[np.newaxis, ]  # add the leading time axis
    res = FloodModel.cal2(dataset, dr)
    return res[0]
Exemplo n.º 17
0
def windData(path):
    """Mirror the latest wind grid file and interpolate every field."""
    *_, elements, ftp = Writefile.readxml(path, 2)
    cfg = elements.split(',')
    ftp = ftp.split(',')
    grib = Datainterface.GribData()
    remote_url = os.path.join(r'\\SPCC\\BEXN', glovar.now.strftime('%Y'),
                              glovar.now.strftime('%Y%m%d'))
    grib.mirror(cfg[0], remote_url, cfg[1], ftp, cfg[2])
    # Newest mirrored file in the local directory.
    newest = sorted(os.listdir(cfg[1]))[-1]
    dataset, lat, lon, _ = Znwg.arrange(grib.readGrib(cfg[1] + newest))
    # Every field interpolated onto the common glovar grid.
    return [
        interp.interpolateGridData(field, lat, lon, glovar.lat, glovar.lon)
        for field in dataset
    ]
Exemplo n.º 18
0
def mirrorskskt(newlat, newlon, hours=1):
    """Fetch station ground temperatures (GST) from CIMISS, grid and cache.

    hours: length of the query window, ending 8 hours before now.
    newlat / newlon are currently unused (the glovar grid is the target) —
    kept for interface compatibility.
    Returns a list of NaN-free grids, one per distinct observation time.
    """
    interfaceId = 'getSurfEleInRegionByTimeRange'
    elements = "Station_Id_C,Lat,Lon,GST,Year,Mon,Day,Hour"
    # Scheduled for 08:00 / 20:00 runs; the -8 h shift is presumably a
    # local-time -> UTC conversion — TODO confirm.
    lastime = datetime.datetime.now().replace(
        minute=0, second=0) - datetime.timedelta(hours=8)
    firstime = lastime - datetime.timedelta(hours=hours)
    # Closed CIMISS time range "[first,last]".
    temp = ('[', firstime.strftime('%Y%m%d%H%M%S'), ',',
            lastime.strftime('%Y%m%d%H%M%S'), ']')
    Time = ''.join(temp)
    params = {
        'dataCode': "SURF_CHN_MUL_HOR",  # hourly surface observations
        'elements': elements,
        'timeRange': Time,
        'adminCodes': "630000"
    }
    initData = Datainterface.cimissdata(interfaceId, elements,
                                        **params)  # raw CIMISS records
    initData.Time = pd.to_datetime(initData.Time)
    # Split the records by observation time and grid each group.
    timeList = pd.to_datetime(initData.Time.unique()).strftime(
        '%Y%m%d%H%M%S')  # NOTE(review): relies on unique() ordering — confirm
    initData.set_index(initData.Time, inplace=True)
    oneHourgrid = [
        np.nan_to_num(
            interp.interpolateGridData(
                initData.loc[tm].GST.values.astype('float32'),
                initData.loc[tm].Lat.values.astype('float32'),
                initData.loc[tm].Lon.values.astype('float32'), glovar.lat,
                glovar.lon)) for tm in timeList
    ]
    Time = datetime.datetime.now().strftime('%Y%m%d%H')
    # BUG FIX: str.join takes a single iterable, not three arguments.
    savepath = ''.join([r'/home/cqkj/QHTraffic/tmp/skt', Time, r'.pkl'])
    # BUG FIX: the cache file was opened 'rb' (read) while being written
    # with pickle.dump; open it for writing instead.
    with open(savepath, 'wb') as f:  # hourly cache consumed by updateSnow
        pickle.dump(oneHourgrid, f)

    return oneHourgrid
Exemplo n.º 19
0
 def clcindex(self, sdgrid, lat, lon):
     """Compute the snow-depth road index and sample it at road stations.

     :param sdgrid: gridded snow-depth fields, one per forecast step
     :return: self.sdindex with one station-index array appended per step
     """
     with open(self.roadpath, 'rb') as f:
         roadmes = pickle.load(f)
         station_lat, station_lon = roadmes.lat, roadmes.lon
     # Bucket snow depth into index classes 0-3.
     self.sdgindex = np.piecewise(sdgrid, [
         sdgrid <= 0, (sdgrid > 0) & (sdgrid <= 5),
         (sdgrid > 5) & (sdgrid <= 10), sdgrid > 10
     ], [0, 1, 2, 3])
     self.sdgindex = self.sdgindex.astype('int32')
     for grid_index in self.sdgindex:
         station_index = interp.interpolateGridData(grid_index,
                                                    lat,
                                                    lon,
                                                    newlat=station_lat,
                                                    newlon=station_lon,
                                                    isGrid=False)
         self.sdindex.append(station_index)
     return self.sdindex
Exemplo n.º 20
0
def clcsnow(ele, snowdeque, modelpath, skt, station):
    """Compute the new hourly snow-depth grid and update the rolling deque.

    ele: (temperature, rain, humidity) real-time grids.
    snowdeque: path to a pickled 12-element deque of recent snow grids.
    modelpath: path to the pickled snow model; skt: ground-temperature grid.
    station: station DataFrame with lat/lon for point extraction.
    Returns (newsnow, r): the new snow grid and the rain field.
    """
    # NOTE(review): pickle.load on files — only load trusted paths.
    with open(modelpath, 'rb') as f:
        model = pickle.load(f)

    with open(snowdeque, 'rb') as f:
        dq = pickle.load(f)  # deque holding the last 12 snow grids

    t, r, rh = ele
    # Feature matrix: [T, rain, RH, skin temp, snow from two steps back].
    ele = np.concatenate([
        t.reshape(-1, 1),
        r.reshape(-1, 1),
        rh.reshape(-1, 1),
        skt.reshape(-1, 1), dq[-2].reshape(-1, 1)
    ],
                         axis=1)
    newsnow = model.predict(np.nan_to_num(ele)).reshape(
        901, 1401)  # assumes the model accepts this feature layout — confirm
    # Sample the new grid at the stations (for live verification).
    # NOTE(review): unlike clcRoadic this omits isGrid=False — confirm.
    stationSnow = np.nan_to_num(
        interp.interpolateGridData(newsnow, glovar.lat, glovar.lon,
                                   station.lat.values,
                                   station.lon.values))
    # Build the station frame (the DB insert below appears unfinished).
    time = datetime.datetime.now().strftime('%Y%m%d%H')
    alltime = np.repeat(time, len(stationSnow))
    df = pd.DataFrame(stationSnow[:, np.newaxis], columns=['snow'])
    Time = pd.DataFrame(alltime[:, np.newaxis], columns=['Time'])
    # dataframe = pd.concat([dt, df, Time], axis=1)

    dq.append(newsnow)  # push the new grid into the rolling window
    with open(snowdeque, 'wb') as f:
        pickle.dump(dq, f)
    return newsnow, r
Exemplo n.º 21
0
def firelevel(data, path, snow, landtype):
    """Compute grassland / forest fire-danger levels (1-5).

    data: unpacked as (*wind components, humidity, temperature in Kelvin,
          rain series); path: pickle of the recent precipitation history;
    snow / landtype: masking grids (landtype[0]=grass, landtype[1]=forest).
    Returns (gindex, findex, mindex).
    """
    with open(path, 'rb') as f:
        predq = pickle.load(f)
    # Steps since the last rain within the stored history.
    preres = np.argmax(np.array(predq)[::-1], axis=0)
    tem = np.add.reduce(predq)
    preres[tem == 0] = 8  # no rain anywhere in the history window
    *eda, erh, tmp, er = data  # eda = U/V wind components
    refertest = []
    for i in range(len(er)):
        if i == 0:
            # Dry cells (rain < 0.1) extend the historical dry spell.
            test = np.piecewise(er[i], [er[i] < 0.1, er[i] >= 0.1], [1, 0])
            test = np.where(test > 0, test + preres, 0)
        else:
            test = np.argmax(er[:i + 1][::-1], axis=0)
            refer = np.add.reduce(er[:i + 1])
            test[refer == 0] = i + 1
            test = np.where(test >= i + 1, test + preres, test)
        refertest.append(test)
    eda = np.sqrt(eda[0]**2 + eda[1]**2)  # wind speed magnitude
    # BUG FIX: the original used (eda > 14) after (eda < 14), so eda == 14
    # matched no bucket and was silently mapped to 0; keep every bin
    # half-open [lo, hi) like its neighbours.
    edaindex = np.piecewise(
        eda, [(0 <= eda) & (eda < 1.6), (eda >= 1.6) & (eda < 3.5),
              (eda >= 3.5) & (eda < 5.6), (eda >= 5.6) & (eda < 8.1),
              (eda >= 8.1) & (eda < 10.9), (eda >= 10.9) & (eda < 14),
              (eda >= 14) & (eda < 17.2), eda >= 17.2],
        [3.846, 7.692, 11.538, 15.382, 19.236, 23.076, 26.923, 30.9])
    tmp -= 273.15  # Kelvin -> Celsius
    tmpindex = np.piecewise(tmp, [
        tmp < 5, (tmp >= 5) & (tmp < 11), (tmp >= 11) & (tmp < 16),
        (tmp >= 16) & (tmp < 21), (tmp >= 21) & (tmp <= 25), tmp > 25
    ], [0, 4.61, 6.1, 9.23, 12.5, 15.384])
    erhindex = np.piecewise(erh, [
        erh > 70, (erh >= 60) & (erh <= 70), (erh >= 50) & (erh < 60),
        (erh >= 40) & (erh < 50), (erh >= 30) & (erh <= 40), erh < 30
    ], [0, 3.076, 6.153, 9.23, 12.307, 15.384])
    refertest = np.array(refertest)
    # Dry-spell length -> danger contribution.
    mindex = np.piecewise(refertest, [
        refertest == 0, refertest == 1, refertest == 2, refertest == 3,
        refertest == 4, refertest == 5, refertest == 6, refertest == 7,
        refertest >= 8
    ], [0, 7.692, 11.538, 19.23, 23.076, 26.923, 30.7, 34.615, 38])
    u = edaindex + tmpindex + erhindex + mindex
    rain = np.piecewise(er, [er < 0.1, er >= 0.1], [0, 1])
    rain = [
        interp.interpolateGridData(r, glovar.latt, glovar.lonn, glovar.lat,
                                   glovar.lon) for r in rain
    ]
    rain = np.nan_to_num(np.array(rain))
    u = [
        interp.interpolateGridData(u_, glovar.latt, glovar.lonn, glovar.lat,
                                   glovar.lon) for u_ in u
    ]
    u = np.nan_to_num(np.array(u))
    green = u * landtype[0] * rain * snow  # grassland contribution
    forest = u * landtype[1] * rain * snow  # forest contribution
    gindex = np.piecewise(green, [
        green <= 25, (green > 25) & (green < 51), (green >= 51) & (green < 73),
        (green >= 73) & (green < 91), green >= 91
    ], [1, 2, 3, 4, 5])
    findex = np.piecewise(forest, [
        forest <= 25,
        (forest > 25) & (forest < 51), (forest >= 51) & (forest < 73),
        (forest >= 73) & (forest < 91), forest >= 91
    ], [1, 2, 3, 4, 5])
    mindex = np.maximum(gindex, findex)  # overall meteorological fire index
    return gindex, findex, mindex
Exemplo n.º 22
0
    def getInfo(self, path, grbs, grb, nlat=None, nlon=None):
        """Extract metadata and data from a GRIB message.

        :param path: source file path (used to infer grid size / element name)
        :param grbs: the full GRIB message list (lat/lon messages for type 39)
        :param grb: the GRIB message to decode
        :param nlat: optional target latitude array to crop to
        :param nlon: optional target longitude array to crop to
        :return: (init_time, name, data, lat, lon, size)
        """
        init_time = dt.datetime(grb.year, grb.month, grb.day, grb.hour,
                                grb.minute, grb.second)
        # Forecast hour: an "a-b" stepRange is an accumulation; keep the end.
        fh = grb.stepRange
        if '-' in fh:
            fh = [int(i) for i in fh.split('-')][-1]
        else:
            fh = int(fh)
        # types=39->bcsh  Beijing->Z-  European->ec  NCEP->gfs
        types = grb.centreDescription
        if types == '39':
            # Grid size inferred from the product family in the file path.
            if 'warr' in path:
                size = 0.03
            elif 'warms' in path:
                size = 0.09
            # Lat/lon carried as messages 16/17 of the file; shape [Nj, Ni].
            lat_ = grbs[16].values
            lon_ = grbs[17].values
            lat = np.arange(lat_.min(), lat_.max() + size / 2, size)
            lon = np.arange(lon_.min(), lon_.max() + size / 2, size)
            # NOTE(review): method='griddate' — confirm the spelling against
            # interp.interpolateGridData (possibly meant 'griddata').
            data = interp.interpolateGridData(grb.values,
                                              lat_,
                                              lon_,
                                              lat,
                                              lon,
                                              method='griddate',
                                              Size=size * 2,
                                              isGrid=True)
        else:
            # Regular lat/lon grid carried directly by the message.
            lat = grb.distinctLatitudes  # ndim=1
            lon = grb.distinctLongitudes  # ndim=1
            if grb.iDirectionIncrementInDegrees != grb.jDirectionIncrementInDegrees:
                raise ValueError('lat_size!=lon_size')
            size = grb.iDirectionIncrementInDegrees
            data = grb.values

        # First non-'unknown' identifier wins (debug prints removed).
        for i in [grb.shortName, grb.name, grb.parameterName]:
            name = i
            if name == 'unknown':
                continue
            else:
                break
        # Smart-grid products encode the element name in the file name.
        if (name == 'unknown') & ('Beijing' in types):
            name = path.split('_')[-3].split('-')[-1]
        if name == 'unknown':
            raise ValueError('name is unknown')

        if grb['level'] > 0:
            name = name + '_' + str(grb['level'])
        name = name + '_' + str(fh)
        # Optionally crop to the requested lat/lon window.
        if (nlat is not None) | (nlon is not None):
            if nlat is None:
                nlat = lat
            if nlon is None:
                nlon = lon
            if types == '39':
                ind_lat = np.where((lat >= nlat.min())
                                   & (lat <= nlat.max()))[0]
                ind_lon = np.where((lon >= nlon.min())
                                   & (lon <= nlon.max()))[0]
                data = data[ind_lat, :]
                data = data[:, ind_lon]
                lat, lon = lat[ind_lat], lon[ind_lon]
            else:
                # Let the GRIB library do the windowed extraction.
                data, lats, lons = grb.data(lat1=nlat.min(),
                                            lat2=nlat.max(),
                                            lon1=nlon.min(),
                                            lon2=nlon.max())
                lat = lats[:, 0]
                lon = lons[0, :]

        return init_time, name, data, lat, lon, size