Example #1
def icele():
    # Fetch the EC (ECMWF) forecast data
    dics = ['ECMWF_HR/SKINT', 'ECMWF_HR/APCP', 'ECMWF_HR/SNOD']
    ectime = ecmwf.ecreptime()
    # The 20:00 run fetches today's 08:00 EC forecast (possible issue here:
    # confirm whether fh=12 corresponds to 05-08 or 08-11)
    fh = [i for i in range(12, 181, 3)]
    lonlatset, dataset = [], []
    for dic in dics:
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            # Refer to the change made on 233
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                               glovar.lon))
            else:
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                               glovar.lon))
        dataset.append(np.array(newdata))  # save the interpolated data; shape should be (57, X, X)
    return dataset
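
A pattern repeated across these examples is the missing-step fix: when a forecast step is entirely NaN, the following step's accumulation is split evenly between the two before interpolation. Below is a minimal numpy-only sketch of just that step, with the project helpers (Datainterface, interp, glovar) left out:

import numpy as np

def split_missing_accumulation(data):
    # If a step is entirely NaN, give it half of the next step's
    # accumulation and halve the next step accordingly.
    data = data.copy()
    for i in range(data.shape[0] - 1):
        if np.isnan(data[i]).all():
            data[i] = data[i + 1] / 2
            data[i + 1] = data[i + 1] / 2
    return data

# Step 0 is missing; step 1 carries the combined accumulation.
demo = np.array([[np.nan, np.nan], [4.0, 2.0], [1.0, 0.0]])
print(split_missing_accumulation(demo))  # [[2. 1.], [2. 1.], [1. 0.]]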
Example #2
def main():
    saltedata = saltedata(path)  # 'path' is expected to be defined elsewhere in the project
    snowpre = np.random.randint(0, 1, size=(801 * 1381, 1))  # previous snow depth placeholder (randint(0, 1) always yields 0)
    snow = SnowDepth()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    region = [
        float(i) for i in ','.join(
            Writefile.readxml(
                r'/home/cqkj/QHTraffic/Product/Traffic/SNOD/config.xml',
                0)).split(',')
    ]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # Extract the data and lon/lat (nested loop; see whether it can be improved)
    for dic in snow.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, new_lat,
                                               new_lon))
            else:
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, new_lat,
                                               new_lon))
        dataset.append(np.array(newdata))  # save the interpolated data
    depthgrid = snow.clcsd(dataset, new_lat, new_lon, saltedata, snowpre)
    snow.write(depthgrid, new_lat, new_lon)
    dangerindex = snow.clcindex(depthgrid, new_lat, new_lon)
    snow.write(dangerindex, type=1)
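
The target grid in main() comes from a comma-separated region spec read out of config.xml. The file's contents aren't shown, so the sketch below assumes the ordering implied by the indexing above, [lon_min, lat_min, lon_max, lat_max, resolution], with made-up values:

import numpy as np

# Hypothetical region string; the real values live in config.xml.
region = [float(v) for v in '89.0,31.0,103.0,40.0,0.05'.split(',')]
new_lon = np.arange(region[0], region[2], region[-1])  # lon_min .. lon_max, step = resolution
new_lat = np.arange(region[1], region[3], region[-1])  # lat_min .. lat_max, step = resolution
print(new_lon.size, new_lat.size)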
Example #3
def snowData():
    # Fetch the EC data (air temperature, precipitation, ground temperature, humidity, snow depth)
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run fetches today's 08:00 EC forecast
    # *_, dics = Writefile.readxml(glovar.trafficpath, 0)
    *_, dics = Writefile.readxml(
        r'/home/cqkj/LZD/Product/Product/config/Traffic.xml', 0)
    dicslist = dics.split(',')[:-1]
    lonlatset, dataset = [], []
    for dic in dicslist:
        newdata = []
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                               glovar.lon))
            else:
                newdata.append(
                    interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                               glovar.lon))
        newdata = np.array(newdata)
        # newdata[newdata<0] = 0                    # clip negatives to keep the data valid
        dataset.append(newdata)  # save the interpolated data
    return np.array(dataset)
Example #4
def presnow():
    # Get the snow depth at the previous time step
    ectime = ecmwf.ecreptime()
    fh = [0]
    dic = 'ECMWF_HR/SNOD'
    lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
    return interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
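
interp.interpolateGridData is a project helper that isn't shown in these examples. Assuming it performs ordinary bilinear regridding between regular lat/lon grids, a rough SciPy-based stand-in could look like this (a sketch, not the project's actual implementation):

import numpy as np
from scipy.interpolate import RegularGridInterpolator

def regrid(values, lat, lon, new_lat, new_lon):
    # Bilinear regridding; assumes ascending, regular lat/lon axes.
    f = RegularGridInterpolator((lat, lon), values,
                                bounds_error=False, fill_value=np.nan)
    glat, glon = np.meshgrid(new_lat, new_lon, indexing='ij')
    pts = np.column_stack([glat.ravel(), glon.ravel()])
    return f(pts).reshape(len(new_lat), len(new_lon))

lat = np.linspace(31, 40, 46)
lon = np.linspace(89, 103, 71)
field = np.random.rand(lat.size, lon.size)
out = regrid(field, lat, lon, np.linspace(31, 40, 91), np.linspace(89, 103, 141))
print(out.shape)  # (91, 141)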
Example #5
 def depth2onezero(self, icegrid, lat, lon):
     with open(self.roadpath, 'rb') as f:
         roadmes = pickle.load(f)
     station_lat, station_lon = roadmes.lat, roadmes.lon
     icegrid = np.where(icegrid > 0.05, 1, 0)  # binarize: 1 where ice thickness exceeds 0.05, else 0
     iceindex = []
     for i in range(56):
         iceindex.append(
             interp.interpolateGridData(icegrid[i],
                                        lat,
                                        lon,
                                        station_lat,
                                        station_lon,
                                        isGrid=False))
     iceroad = np.concatenate(iceindex, axis=1)
     return iceroad
Example #6
 def clcindex(self, sdgrid, lat, lon):
     """
     积雪指数计算
     :param path:道路文件路径 
     :return: 道路指数数据
     """
     with open(self.roadpath, 'rb') as f:
         roadmes = pickle.load(f)
         station_lat, station_lon = roadmes.lat, roadmes.lon
     self.sdgindex = np.piecewise(sdgrid, [
         sdgrid <= 0, (sdgrid > 0) & (sdgrid <= 5),
         (sdgrid > 5) & (sdgrid <= 10), sdgrid > 10
     ], [0, 1, 2, 3])
     self.sdgindex = self.sdgindex.astype('int32')
     for sdgindex in self.sdgindex:
         sdgindex = interp.interpolateGridData(sdgindex,
                                               lat,
                                               lon,
                                               newlat=station_lat,
                                               newlon=station_lon,
                                               isGrid=False)
         self.sdindex.append(sdgindex)
     return self.sdindex
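
The np.piecewise call above bins snow depth into a four-level index: <= 0 maps to 0, (0, 5] to 1, (5, 10] to 2, and > 10 to 3 (the unit is presumably centimetres, though that isn't stated). A standalone illustration of the binning:

import numpy as np

sd = np.array([[0.0, 2.0], [7.5, 12.0]])
idx = np.piecewise(sd, [sd <= 0, (sd > 0) & (sd <= 5),
                        (sd > 5) & (sd <= 10), sd > 10], [0, 1, 2, 3])
print(idx.astype('int32'))  # [[0 1]
                            #  [2 3]]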
Example #7
def index(sdgrid):
    with open(glovar.roadpath, 'rb') as f:
        roadmes = pickle.load(f)
        station_lat, station_lon = roadmes.lat, roadmes.lon
    sdgindex = np.piecewise(sdgrid, [
        sdgrid <= 0, (sdgrid > 0) & (sdgrid <= 5),
        (sdgrid > 5) & (sdgrid <= 10), sdgrid > 10
    ], [0, 1, 2, 3])
    sdgindex = sdgindex.astype('int32')
    '''
    for sdgindex in sdgindex:
        sdgindex = interp.interpolateGridData(sdgindex, lat, lon, newlat=station_lat, newlon=station_lon, isGrid=False)
        sdindex.append(sdgindex)
    '''
    sdindex = [
        interp.interpolateGridData(sdg,
                                   glovar.lat,
                                   glovar.lon,
                                   newlat=station_lat,
                                   newlon=station_lon,
                                   isGrid=False) for sdg in sdgindex
    ]
    return sdindex
Example #8
def firelevel(data, path, snow, landtype):
    """
    ????????????
    :param data: ??????????????????
    :return: greenfirelevel
    """
    ############################################################
    # ?????????§Õ›¥???????????????????????????›¥?pickle
    with open(path, 'rb') as f:
        predq = pickle.load(f)
    preres = np.argmax(np.array(predq)[::-1], axis=0)
    tem = np.add.reduce(predq)
    preres[tem == 0] = 8                       # eight days with no precipitation at all
    ##############################################################
    # Count consecutive days without precipitation; assumes the data is
    # already coded (precipitation = 0, no precipitation = 1)
    # eda holds the U/V wind components (wind speed is derived from them);
    # tmp is in Kelvin and must be converted to Celsius
    *eda, erh, tmp, er = data
    refertest = []
    for i in range(len(er)):
        if i == 0:
            test = np.piecewise(er[i], [er[i] < 0.1, er[i] >= 0.1], [1, 0])  # result: consecutive dry-day count
            # np.piecewise(test, [test == 0, test > 0], [0, test+preres])
            test = np.where(test>0, test+preres, 0)
        else:
            test = np.argmax(er[:i + 1][::-1], axis=0)
            refer = np.add.reduce(er[:i + 1])
            test[refer == 0] = i + 1
            # np.piecewise(test, [test < i+1, test >= i+1], [test, lambda x:x+preres])
            test = np.where(test>=i+1, test+preres, test)
        refertest.append(test)
    #############################################################
    # Look up the fire-danger index for each meteorological factor
    eda = np.sqrt(eda[0]**2 + eda[1]**2)                                 # wind speed from the U/V components
    edaindex = np.piecewise(eda, [(0<=eda)&(eda<1.6), (eda>=1.6)&(eda<3.5), (eda>=3.5)&
                                  (eda<5.6), (eda>=5.6)&(eda<8.1), (eda>=8.1)&(eda<10.9),
                                  (eda >=10.9)&(eda<14),(eda>14)&(eda<=17.2), eda>17.2],
                            [3.846, 7.692, 11.538, 15.382, 19.236, 23.076, 26.923, 30.9])

    tmp -= 273.15                                    # convert to Celsius
    tmpindex = np.piecewise(tmp, [tmp<5, (tmp>=5)&(tmp<11), (tmp>=11)&
                                  (tmp<16), (tmp>=16)&(tmp<21), (tmp>=21)&(tmp<=25),
                                  tmp>25], [0, 4.61, 6.1, 9.23, 12.5, 15.384])

    erhindex = np.piecewise(erh, [erh>70, (erh>=60)&(erh<=70), (erh>=50)&
                                  (erh<60), (erh>=40)&(erh<50), (erh>=30)&(erh<=40),
                                  erh<30], [0, 3.076, 6.153, 9.23, 12.307, 15.384])
    refertest = np.array(refertest)
    mindex = np.piecewise(refertest, [refertest==0, refertest==1, refertest==2, refertest==3,
                                      refertest==4, refertest==5, refertest==6, refertest==7,
                                      refertest>=8], [0, 7.692, 11.538, 19.23, 23.076, 26.923,
                                                      30.7, 34.615, 38])
    u = edaindex + tmpindex + erhindex + mindex
    ###################################################################################
    # Correction step (needs the snow-depth, precipitation and land-surface matrices; the snow-depth correction is not applied yet)
    rain = np.piecewise(er, [er<0.1, er>=0.1], [0, 1])
    rain = [interp.interpolateGridData(r, glovar.latt, glovar.lonn, glovar.lat, glovar.lon) for r in rain]
    rain = np.nan_to_num(np.array(rain))

    u = [interp.interpolateGridData(u_, glovar.latt, glovar.lonn, glovar.lat, glovar.lon) for u_ in u]
    u = np.nan_to_num(np.array(u))
    green = u*landtype[0]*rain*snow                                 # grassland fire-danger product
    forest = u*landtype[1]*rain*snow                                # forest fire-danger product
    ###################################################################################
    # Classify the grassland/forest fire danger into meteorological levels (1-5)
    gindex = np.piecewise(green, [green<=25, (green>25)&(green<51), (green>=51)&(green<73), (green>=73)&(green<91), green>=91], [1,2,3,4,5])
    findex = np.piecewise(forest, [forest <= 25, (forest > 25) & (forest < 51), (forest >= 51) & (forest < 73),
                                  (forest >= 73) & (forest < 91), forest >= 91], [1, 2, 3, 4, 5])
    mindex = np.maximum(gindex, findex)

    return gindex, findex, mindex
Example #9
def firelevel(data, path, snow, landtype):
    """
    Compute the grassland fire-danger level.
    :param data: other conditions apart from the meteorological elements
    :return: greenfirelevel
    """
    ############################################################
    # Observed precipitation (about eight days' worth) is kept in a deque and stored as a pickle
    with open(path, 'rb') as f:
        predq = pickle.load(f)
    preres = np.argmax(np.array(predq)[::-1], axis=0)
    tem = np.add.reduce(predq)
    preres[tem == 0] = 8  # eight days with no precipitation at all
    ##############################################################
    # Count consecutive days without precipitation; assumes the data is
    # already coded (precipitation = 0, no precipitation = 1)
    # eda holds the U/V wind components (wind speed is derived from them);
    # tmp is in Kelvin and must be converted to Celsius
    *eda, erh, tmp, er = data
    refertest = []
    for i in range(len(er)):
        if i == 0:
            test = np.piecewise(er[i], [er[i] < 0.1, er[i] >= 0.1],
                                [1, 0])  # result: consecutive dry-day count
            # np.piecewise(test, [test == 0, test > 0], [0, test+preres])
            test = np.where(test > 0, test + preres, 0)
        else:
            test = np.argmax(er[:i + 1][::-1], axis=0)
            refer = np.add.reduce(er[:i + 1])
            test[refer == 0] = i + 1
            # np.piecewise(test, [test < i+1, test >= i+1], [test, lambda x:x+preres])
            test = np.where(test >= i + 1, test + preres, test)
        refertest.append(test)
    #############################################################
    # Look up the fire-danger index for each meteorological factor
    eda = np.sqrt(eda[0]**2 + eda[1]**2)  # wind speed from the U/V components
    edaindex = np.piecewise(
        eda, [(0 <= eda) & (eda < 1.6), (eda >= 1.6) & (eda < 3.5),
              (eda >= 3.5) & (eda < 5.6), (eda >= 5.6) & (eda < 8.1),
              (eda >= 8.1) & (eda < 10.9), (eda >= 10.9) & (eda < 14),
              (eda > 14) & (eda <= 17.2), eda > 17.2],
        [3.846, 7.692, 11.538, 15.382, 19.236, 23.076, 26.923, 30.9])

    tmp -= 273.15  # convert to Celsius
    tmpindex = np.piecewise(tmp, [
        tmp < 5, (tmp >= 5) & (tmp < 11), (tmp >= 11) & (tmp < 16),
        (tmp >= 16) & (tmp < 21), (tmp >= 21) & (tmp <= 25), tmp > 25
    ], [0, 4.61, 6.1, 9.23, 12.5, 15.384])

    erhindex = np.piecewise(erh, [
        erh > 70, (erh >= 60) & (erh <= 70), (erh >= 50) & (erh < 60),
        (erh >= 40) & (erh < 50), (erh >= 30) & (erh <= 40), erh < 30
    ], [0, 3.076, 6.153, 9.23, 12.307, 15.384])
    refertest = np.array(refertest)
    mindex = np.piecewise(refertest, [
        refertest == 0, refertest == 1, refertest == 2, refertest == 3,
        refertest == 4, refertest == 5, refertest == 6, refertest == 7,
        refertest >= 8
    ], [0, 7.692, 11.538, 19.23, 23.076, 26.923, 30.7, 34.615, 38])
    u = edaindex + tmpindex + erhindex + mindex
    ###################################################################################
    # Correction step (needs the snow-depth, precipitation and land-surface
    # matrices; the snow-depth correction is not applied yet)
    rain = np.piecewise(er, [er < 0.1, er >= 0.1], [0, 1])
    rain = [
        interp.interpolateGridData(r, glovar.latt, glovar.lonn, glovar.lat,
                                   glovar.lon) for r in rain
    ]
    rain = np.nan_to_num(np.array(rain))

    u = [
        interp.interpolateGridData(u_, glovar.latt, glovar.lonn, glovar.lat,
                                   glovar.lon) for u_ in u
    ]
    u = np.nan_to_num(np.array(u))
    green = u * landtype[0] * rain * snow  # grassland fire-danger product
    forest = u * landtype[1] * rain * snow  # forest fire-danger product
    ###################################################################################
    # Classify the grassland/forest fire danger into meteorological levels (1-5)
    gindex = np.piecewise(green, [
        green <= 25, (green > 25) & (green < 51), (green >= 51) & (green < 73),
        (green >= 73) & (green < 91), green >= 91
    ], [1, 2, 3, 4, 5])
    findex = np.piecewise(forest, [
        forest <= 25,
        (forest > 25) & (forest < 51), (forest >= 51) & (forest < 73),
        (forest >= 73) & (forest < 91), forest >= 91
    ], [1, 2, 3, 4, 5])
    mindex = np.maximum(gindex, findex)

    return gindex, findex, mindex
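
The least obvious part of firelevel is the dry-spell counter: argmax over the time-reversed precipitation record gives the number of steps since the most recent precipitation, and the running sum catches windows with no precipitation at all (forced to 8 for predq, to i + 1 inside the loop). A small sketch of that counting trick, assuming precipitation steps are coded 1 and dry steps 0 (the exact encoding of predq and er isn't shown here):

import numpy as np

series = np.array([0, 1, 0, 0, 1, 0, 0, 0])  # one value per step, most recent last
rev = series[::-1]
steps_since_rain = np.argmax(rev)            # 3: the last precipitation was 3 steps ago
if rev.sum() == 0:                           # no precipitation anywhere in the window
    steps_since_rain = series.size
print(steps_since_rain)  # 3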