Example #1
def icele():
    # Fetch EC data
    dics = ['ECMWF_HR/SKINT', 'ECMWF_HR/APCP', 'ECMWF_HR/SNOD']
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run fetches today's 08:00 EC forecast (possible issue: confirm whether fh=12 covers 05-08 or 08-11)
    lonlatset, dataset = [], []
    for dic in dics:
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            # refer to the change made on 233
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                # a fully missing step borrows half of the following accumulation
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                           glovar.lon))
        dataset.append(np.array(newdata))  # save the interpolated dataset; data shape should be (57, X, X)
    return dataset
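The NaN handling above assumes a fully missing 3-hourly field can be reconstructed by splitting the following step's accumulation in half. A minimal standalone sketch of that idea (toy arrays only, no MICAPS access or interpolation) could look like this:

import numpy as np

def split_missing_steps(data):
    # data: array of shape (nt, ny, nx); an all-NaN step borrows half of the next step
    filled = data.copy()
    for i in range(filled.shape[0] - 1):
        if np.isnan(filled[i]).all():
            filled[i] = filled[i + 1] / 2
            filled[i + 1] = filled[i + 1] / 2
    return filled

demo = np.array([[[np.nan]], [[4.0]]])    # step 0 missing, step 1 holds 4 mm
print(split_missing_steps(demo).ravel())  # -> [2. 2.]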
Example #2
def Weatherdata(path):
    # Fetch the meteorological data needed for the forest fire-danger product
    elements, subdirs, localdir, _, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    remote_urls = [os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d')) for subdir in subdirs]  # to be constructed

    grib = Datainterface.GribData()
    '''
    [grib.mirror(element, remote_url, localdir, freq, ftp) for element, remote_url in
     zip(elements[:-1], remote_urls[:-1])]  # mirror the wind, relative-humidity and temperature elements together (24003)
     '''
    for element, remote_url in zip(elements[:-1], remote_urls[:-1]):
        grib.mirror(element, remote_url, localdir, freq, ftp)

    grib.mirror(elements[-1], remote_urls[-1], localdir, '24024', ftp)  # mirror the precipitation element
    # This should extract a separate file list per element; for now keep it simple and build four patterns
    strings = ','.join(os.listdir(localdir))
    patterns = [r'(\w+.EDA.*?.GRB2)', r'(\w+.ERH.*?.GRB2)', r'(\w+.TMP.*?.GRB2)', r'(\w+.ER24.*?.GRB2)']
    allpath = [localdir + sorted(Znwg.regex(pattern, strings), key=str.lower)[-1] for pattern in patterns]  # allpath should hold the newest synced file for each of the four patterns
    ele14list = slice(1, 74, 8)  # (+2-1) indexes the 14:00 data of the next 10 days for the first three elements
    #### the first element, wind, contains both u and v components
    wind = grib.readGrib(allpath[0])[0]
    windu_v = np.array([v for _, v in wind.items()])
    windu, windv  = windu_v[::2][ele14list], windu_v[1::2][ele14list]
    data = np.array([Znwg.arrange(grib.readGrib(path))[0][ele14list] for path in allpath[1:-1]])  # read the first three elements' data
    #er, lat, lon, size = Znwg.arrange(grib.readGrib(allpath[-1], nlat=glovar.lat, nlon=glovar.lon))  # precipitation is a national-scale product; first check that its grid matches the other three elements
    er, lat, lon, size = Znwg.arrange([grib.readGrib(allpath[-1], nlat=glovar.latt, nlon=glovar.lonn)][0])
    result = windu, windv, data, er  # the final data should form a [4, 10, 181, 277] array
    return result, lat, lon
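The ele14list index used above is easier to read with a small worked example; assuming the GRIB records are ordered by forecast step, slice(1, 74, 8) picks every eighth record starting at index 1, i.e. ten entries (one per day):

ele14list = slice(1, 74, 8)
print(list(range(80))[ele14list])  # [1, 9, 17, 25, 33, 41, 49, 57, 65, 73] -> 10 records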
Example #3
def main():
    saltedata_grid = saltedata(path)  # renamed local so it does not shadow the saltedata helper (shadowing would raise UnboundLocalError)
    snowpre = np.random.randint(0, 1, size=(801 * 1381, 1))
    snow = SnowDepth()
    rep = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]
    region = [
        float(i) for i in ','.join(
            Writefile.readxml(
                r'/home/cqkj/QHTraffic/Product/Traffic/SNOD/config.xml',
                0)).split(',')
    ]
    new_lon = np.arange(region[0], region[2], region[-1])
    new_lat = np.arange(region[1], region[3], region[-1])
    lonlatset, dataset = [], []
    # extract data and lat/lon (nested loop; see whether it can be improved)
    for dic in snow.dics:
        lon, lat, data = Datainterface.micapsdata(rep, dic, fh)
        lonlatset.append((lon, lat))
        newdata = []
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, new_lat, new_lon))
        dataset.append(np.array(newdata))  # save the interpolated dataset
    depthgrid = snow.clcsd(dataset, new_lat, new_lon, saltedata_grid, snowpre)
    snow.write(depthgrid, new_lat, new_lon)
    dangerindex = snow.clcindex(depthgrid, new_lat, new_lon)
    snow.write(dangerindex, type=1)
Example #4
def presnow():
    # get the snow depth at the previous time step
    ectime = ecmwf.ecreptime()
    fh = [0]
    dic = 'ECMWF_HR/SNOD'
    lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
    return interp.interpolateGridData(data, lat, lon, glovar.lat, glovar.lon)
Example #5
def snowData():
    # Fetch EC data (air temperature, precipitation, ground temperature, humidity, snow depth)
    ectime = ecmwf.ecreptime()
    fh = [i for i in range(12, 181, 3)]  # the 20:00 run fetches today's 08:00 EC forecast
    # *_, dics = Writefile.readxml(glovar.trafficpath, 0)
    *_, dics = Writefile.readxml(
        r'/home/cqkj/LZD/Product/Product/config/Traffic.xml', 0)
    dicslist = dics.split(',')[:-1]
    lonlatset, dataset = [], []
    for dic in dicslist:
        newdata = []
        lon, lat, data = Datainterface.micapsdata(ectime, dic, fh)
        lonlatset.append((lon, lat))
        for i in range(data.shape[0] - 1):
            if np.isnan(data[i]).all() and (i + 1 < data.shape[0]):
                data[i] = data[i + 1] / 2
                data[i + 1] = data[i + 1] / 2
            newdata.append(
                interp.interpolateGridData(data[i], lat, lon, glovar.lat,
                                           glovar.lon))
        newdata = np.array(newdata)
        # newdata[newdata<0] = 0                    # clip negatives to keep the data valid
        dataset.append(newdata)  # save the interpolated dataset
    return np.array(dataset)
Example #6
def saltepath(path):
    # Process satellite data and return the regridded field (None when no MODIS file is available); path is the satellite data directory
    if not os.path.exists(path):
        print('The given path does not exist! Do not use modis data!')
        return None
    now = dt.datetime.now().strftime('%Y%m%d')
    pattern = r'(\w' + now + '.*?tif)'  # match today's satellite file names
    strings = ','.join(os.listdir(path))  # join file names so the regex can scan them as one string
    tiflists = Znwg.regex(pattern, strings)  # list of today's satellite data files
    if len(tiflists) == 0:
        print("MODIS tiff doesn't exist, call model_2!")
        return None
    else:
        print('MODIS exists, call model_1!')
        gdal.AllRegister()
        dataset = gdal.Open(os.path.join(path, sorted(tiflists)[-1]))  # open the newest matching tif
        rtf = Datainterface.ReadTiff(dataset)
        px, py = rtf.imagexy2pro()
        pro2geov = np.vectorize(rtf.pro2geo)
        lon, lat = pro2geov(px, py)  # slow: np.vectorize loops element-wise
        # newlat =np.linspace(31.4,39.4,801)
        # newlon =np.linspace(89.3,103.1,1381)
        # *_, newdata = rtf.equalatlon('SnowDepth', dataset.ReadAsArray(), lat, lon, newlat, newlon)
        *_, newdata = rtf.equalatlon('SnowDepth', dataset.ReadAsArray(), lat,
                                     lon, glovar.lat, glovar.lon)
        return newdata
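For reference, the filename pattern built above behaves roughly as below when applied with the standard re module (this assumes Znwg.regex is essentially a findall wrapper; the date string and file names are hypothetical):

import re

now = '20210115'                              # hypothetical date string
pattern = r'(\w' + now + '.*?tif)'
strings = ','.join(['A20210115_snow.tif', 'B20210114_snow.tif'])
print(re.findall(pattern, strings))           # ['A20210115_snow.tif']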
Example #7
def roadicgrid():
    # Sync CIMISS station data
    interfaceId = 'getSurfEleInRegionByTimeRange'
    elements = "Station_Id_C,Lat,Lon,Road_ICE_Depth,Year,Mon,Day,Hour"
    lastime = dt.datetime.now().replace(minute=0,
                                        second=0) - dt.timedelta(hours=8)
    firsttime = lastime - dt.timedelta(hours=3)
    temp = ('[', firsttime.strftime('%Y%m%d%H%M%S'), ',',
            lastime.strftime('%Y%m%d%H%M%S'), ')')
    Time = ''.join(temp)
    params = {
        'dataCode': "SURF_CHN_TRAFW_MUL",
        'elements': elements,
        'timeRange': Time,
        'adminCodes': "630000"
    }
    initnaldata = Datainterface.cimissdata(interfaceId, elements,
                                           **params)  # initnaldata's columns must be converted to numeric types first
    initnaldata.Road_ICE_Depth = pd.to_numeric(initnaldata.Road_ICE_Depth)
    initnaldata.Lat = pd.to_numeric(initnaldata.Lat)
    initnaldata.Lon = pd.to_numeric(initnaldata.Lon)

    initnaldata.reset_index(inplace=True)
    o2tdata = cimissdata.onehour2threehour(initnaldata)
    nonandata = cimissdata.cleandata(o2tdata, 999, 'Road_ICE_Depth')
    #####################################################################
    # roadpath = r'E:\LZD\青海项目代码\qhroadic\eightroadmessage.pickle'   # superseded Windows path
    roadpath = r'/home/cqkj/LZD/Product/Product/Source/QHroad_update.csv'
    roaddf = pd.read_csv(roadpath, index_col=0)
    # roaddf = readpickle(roadpath)   # earlier pickle-based variant, kept for reference
    #####################################################################
    roadvalue = cimissdata.insert2road(
        nonandata, roaddf)  # ice values interpolated onto the road points; coordinates come from roaddf's Lat/Lon
    roadvalue = pd.DataFrame(roadvalue)
    # roadvalue.to_csv(r'C:\Users\GJW\Desktop\test.csv', index=None, mode=a)
    cmpath = r'/home/cqkj/LZD/Product/Product/Source/iceday.pkl'
    if not os.path.exists(cmpath):
        dq = deque([roadvalue.values[np.newaxis, ...]], maxlen=96)
        with open(cmpath, 'wb') as f:
            pickle.dump(dq, f)
    else:
        with open(cmpath, 'rb') as f:
            dq = pickle.load(f)
        dq.append(roadvalue.values[np.newaxis, ...])
        with open(cmpath, 'wb') as f:
            pickle.dump(dq, f)
    return None
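The pickle bookkeeping at the end relies on deque(maxlen=96) silently discarding the oldest entry once the buffer is full, so the file keeps only the most recent 96 grids. A toy sketch of that rolling-window behaviour (maxlen shortened for the demo):

import pickle
from collections import deque

dq = deque(maxlen=3)                # the real code uses maxlen=96
for step in range(5):
    dq.append(step)
print(list(dq))                     # [2, 3, 4] - oldest entries dropped automatically

payload = pickle.dumps(dq)          # the deque round-trips through pickle with maxlen intact
print(list(pickle.loads(payload)))  # [2, 3, 4]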
Example #8
def Weatherdata(path):
    # Fetch the meteorological data needed for the forest fire-danger product
    elements, subdirs, localdir, _, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    elements = elements.split(',')
    subdirs = subdirs.split(',')
    remote_urls = [
        os.path.join(subdir, now.strftime('%Y'), now.strftime('%Y%m%d'))
        for subdir in subdirs
    ]  # to be constructed

    grib = Datainterface.GribData()
    '''
    [grib.mirror(element, remote_url, localdir, freq, ftp) for element, remote_url in
     zip(elements[:-1], remote_urls[:-1])]  # mirror the wind, relative-humidity and temperature elements together (24003)
     '''
    for element, remote_url in zip(elements[:-1], remote_urls[:-1]):
        grib.mirror(element, remote_url, localdir, freq, ftp)

    grib.mirror(elements[-1], remote_urls[-1], localdir, '24024',
                ftp)  # mirror the precipitation element
    # This should extract a separate file list per element; for now keep it simple and build four patterns
    strings = ','.join(os.listdir(localdir))
    patterns = [
        r'(\w+.EDA.*?.GRB2)', r'(\w+.ERH.*?.GRB2)', r'(\w+.TMP.*?.GRB2)',
        r'(\w+.ER24.*?.GRB2)'
    ]
    allpath = [
        localdir + sorted(Znwg.regex(pattern, strings), key=str.lower)[-1]
        for pattern in patterns
    ]  # allpath should hold the newest synced file for each of the four patterns
    ele14list = slice(1, 74, 8)  # (+2-1) indexes the 14:00 data of the next 10 days for the first three elements
    #### the first element, wind, contains both u and v components
    wind = grib.readGrib(allpath[0])[0]
    windu_v = np.array([v for _, v in wind.items()])
    windu, windv = windu_v[::2][ele14list], windu_v[1::2][ele14list]
    data = np.array([
        Znwg.arrange(grib.readGrib(path))[0][ele14list]
        for path in allpath[1:-1]
    ])  # read the first three elements' data
    #er, lat, lon, size = Znwg.arrange(grib.readGrib(allpath[-1], nlat=glovar.lat, nlon=glovar.lon))  # precipitation is a national-scale product; first check that its grid matches the other three elements
    er, lat, lon, size = Znwg.arrange(
        [grib.readGrib(allpath[-1], nlat=glovar.latt, nlon=glovar.lonn)][0])
    result = windu, windv, data, er  # the final data should form a [4, 10, 181, 277] array
    return result, lat, lon
Example #9
def liverain(path, pklpath):
    # Mirror the ZNWG 24h precipitation data and append it to the pickle at pklpath
    elements, _, localdir, historydir, freq, *ftp = Writefile.readxml(path, 1)
    now = datetime.datetime.now()
    ytd = now - datetime.timedelta(days=1)
    remote_dir = r'/SCMOC/BEXN'
    remote_url = os.path.join(remote_dir, ytd.strftime('%Y'), ytd.strftime('%Y%m%d'))
    grb = Datainterface.GribData()
    grb.mirror('ER24', remote_url, localdir, '24024', ftp)  # mirror yesterday's 24h precipitation
    rainpath = sorted(os.listdir(localdir))[-1]
    os.chdir(localdir)
    rainlive, lat, lon, res = Znwg.arrange([grb.readGrib(rainpath, nlat=glovar.latt, nlon=glovar.lonn)][0])
    #### update the stored precipitation history
    with open(pklpath, 'rb') as f:
        data = pickle.load(f)
    data.append(rainlive)
    # write the updated deque back to disk
    with open(pklpath, 'wb') as f:
        pickle.dump(data, f)
    return rainlive
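liverain assumes that pklpath already holds a pickled container with an append method (the deque mentioned in the comment above). A hedged one-off initialisation for that file might look like this; the path and maxlen are placeholders, not values taken from the project:

import pickle
from collections import deque

pklpath = '/tmp/rain_history.pkl'       # hypothetical path; the real one comes from configuration
with open(pklpath, 'wb') as f:
    pickle.dump(deque(maxlen=30), f)    # maxlen is an assumption; pick it to match the retention you need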