Example #1
    def _create_lut(self):
        if self.error:
            return
        # Create the lookup table
        lookup_table = prj_core(
            self.cmd, self.res, unit="deg", row=self.row, col=self.col)
        # Use the lookup table and the lon/lat data to get the row/column
        # indices of the data on the global grid
        lookup_table.create_lut(self.lons, self.lats)
        self.ii = lookup_table.lut_i
        self.jj = lookup_table.lut_j
Example #2

    def proj_gome(self):
        # Initialize the projection parameters
        lookup_table = prj_core(self.cmd, self.res, self.row, self.col)
        # Get the row/col indices of the centre points from the lookup table
        ii, jj = lookup_table.lonslats2ij(self.centre_lon, self.centre_lat)
        for i in range(30):
            for j in range(24):
                self.centre_row[i][j] = ii[24 * i + j]
                self.centre_col[i][j] = jj[24 * i + j]

        # Project the corner points
        ii, jj = lookup_table.lonslats2ij(self.corner_lon, self.corner_lat)
        for i in range(30):
            for j in range(24):
                for m in range(4):
                    self.corner_row[i][j][m] = ii[i * 24 * 4 + j * 4 + m]
                    self.corner_col[i][j][m] = jj[i * 24 * 4 + j * 4 + m]
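
For reference, the nested loops above only reshape the flat index arrays returned by lonslats2ij into 30x24 (centre) and 30x24x4 (corner) layouts. A minimal NumPy sketch of the same step, assuming ii and jj are the 1-D arrays returned above (the helper name reshape_ij is illustrative, not part of the original code):

import numpy as np

def reshape_ij(ii, jj, shape):
    # Reshape the flattened row/col indices returned by lonslats2ij
    # into the target layout, e.g. (30, 24) for the centre points
    # or (30, 24, 4) for the corner points.
    return np.asarray(ii).reshape(shape), np.asarray(jj).reshape(shape)

# centre_row, centre_col = reshape_ij(ii, jj, (30, 24))
# corner_row, corner_col = reshape_ij(ii, jj, (30, 24, 4))
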
Example #3
    def _get_proj_info(self):
        if self.error:
            return
        # Create the lookup table
        proj = prj_core(
            self.cmd, self.res, unit="deg", row=self.row, col=self.col)
        # Use the lookup table and the lon/lat data to get the row/column
        # indices of the data on the global grid
        proj_i, proj_j = proj.lonslats2ij(self.lons, self.lats)  # data indices on the global base map
        data_row, data_col = self.lons.shape
        data_i, data_j = np.mgrid[0:data_row:1, 0:data_col:1]  # indices within the data array

        # Keep only the valid row/column indices
        condition = np.logical_and.reduce((proj_i >= 0, proj_i < self.row,
                                           proj_j >= 0, proj_j < self.col))
        self.lut_ii = proj_i[condition]  # row indices of valid data on the global base map
        self.lut_jj = proj_j[condition]  # column indices of valid data on the global base map
        self.data_ii = data_i[condition]  # row indices of valid data within the data array
        self.data_jj = data_j[condition]  # column indices of valid data within the data array
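
The four index arrays produced above are what later examples use to scatter swath data onto the global grid. A minimal sketch of that step, assuming NumPy arrays of the shapes returned by _get_proj_info (the helper name remap_to_grid and the fill value are illustrative, not part of the original code):

import numpy as np

def remap_to_grid(values, lut_ii, lut_jj, data_ii, data_jj, row, col, fill_value=-32767):
    # Scatter the valid swath values onto an empty (row, col) global grid,
    # using the global-grid indices (lut_ii, lut_jj) and the matching
    # in-swath indices (data_ii, data_jj) computed by _get_proj_info.
    grid = np.full((row, col), fill_value, dtype='f4')
    grid[lut_ii, lut_jj] = values[data_ii, data_jj]
    return grid

When several swath pixels map to the same grid cell this simply keeps one of them; Example #8 below instead collects the duplicates per cell and averages them with mean_data.
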
Example #4
def main(inYamlFile, mode):
    T1 = datetime.now()

    # 01 ICFG = input configuration class ##########
    ICFG = ReadYaml(inYamlFile)

    # 02 MCFG = threshold configuration class
    modeFile = os.path.join(
        MainPath, 'cfg', '%s+%s_%s+%s.colloc' % (ICFG.sat1, ICFG.sensor1, ICFG.sat2, ICFG.sensor2))
    MCFG = ReadModeYaml(modeFile)
    if not MCFG.rewrite:
        print 'skip'
        return
    # DCLC = collocation result class
    DCLC = COLLOC_COMM(ICFG.row, ICFG.col, MCFG.chan1)

    T2 = datetime.now()
    print 'read config:', (T2 - T1).total_seconds()

    # Decide whether to rewrite
    if MCFG.rewrite:
        rewrite_mask = True
    else:
        if os.path.isfile(ICFG.ofile):
            rewrite_mask = False

    if rewrite_mask:
        T1 = datetime.now()
        # 03 Parse the L1 data of the first sensor ##########
        for inFile in ICFG.ifile1:
            if '1' in mode:
                print 'L1'
                D1 = CLASS_MERSI_L1()
                D1.Load(inFile)
                D1.sun_earth(ICFG.ymd[0:8])
            elif '0' in mode:
                print 'L1 PRE'
                D1 = CLASS_MERSI_L1_PRE()
                D1.Load(inFile)
                D1.get_coeff(ICFG.coeff_file)
                D1.get_new_ref()
                D1.sun_earth(ICFG.ymd[0:8])

            # 04 Projection: build the lookup table ##########
            print ICFG.cmd
            P1 = prj_core(ICFG.cmd, ICFG.res, row=ICFG.row, col=ICFG.col)
            P1.create_lut(D1.Lons, D1.Lats)
            # 05 Parse the L1 data of the second sensor ##########

            for inFile2 in ICFG.ifile2:
                D2 = CLASS_MODIS_L1()
                D2.Load(inFile2)

                # 06 Projection: build the lookup table ##########
                P2 = prj_core(ICFG.cmd, ICFG.res, row=ICFG.row, col=ICFG.col)
                P2.create_lut(D2.Lons, D2.Lats)
                # 07 Rough collocation ##########
                DCLC.save_rough_data(P1, P2, D1, D2, MCFG)

        T2 = datetime.now()
        print 'rough:', (T2 - T1).total_seconds()
        # 08 Fine collocation and correction of visible-channel reflectance ##########
        T1 = datetime.now()
        DCLC.save_fine_data(MCFG)
        T2 = datetime.now()
        print 'colloc:', (T2 - T1).total_seconds()

        # 09 Write the collocation results ##########
        T1 = datetime.now()
        DCLC.write_hdf5(ICFG, MCFG)
        T2 = datetime.now()
        print 'write:', (T2 - T1).total_seconds()
Example #5

def main(inYamlFile):
    T1 = datetime.now()

    ########## 01 ICFG = input configuration class ##########
    ICFG = ReadYaml(inYamlFile)

    # 02 MCFG = threshold configuration class
    modeFile = os.path.join(
        MainPath, 'cfg', '%s+%s_%s+%s.colloc' %
        (ICFG.sat1, ICFG.sensor1, ICFG.sat2, ICFG.sensor2))
    MCFG = ReadModeYaml(modeFile)
    # DCLC = collocation result class
    DCLC = COLLOC_COMM(ICFG.row, ICFG.col, MCFG.chan1)

    T2 = datetime.now()
    print 'read config:', (T2 - T1).total_seconds()

    # Decide whether to rewrite
    if os.path.isfile(ICFG.ofile):
        rewrite_mask = True
    else:
        rewrite_mask = False

    if not rewrite_mask:
        T1 = datetime.now()
        ########## 03 Parse the L1 data of the first sensor ##########
        for inFile in ICFG.ifile1:
            if 'MERSI' == ICFG.sensor1 and 'FY3C' in ICFG.sat1:
                D1 = CLASS_MERSI_L1()
                D1.Load(inFile)
            elif 'VIRR' == ICFG.sensor1 and 'FY3C' in ICFG.sat1:
                D1 = CLASS_VIRR_L1()
                D1.Load(inFile)
            elif 'IRAS' == ICFG.sensor1 and 'FY3C' in ICFG.sat1:
                D1 = CLASS_IRAS_L1()
                D1.Load(inFile)
            elif 'MERSI' == ICFG.sensor1 and 'FY3D' in ICFG.sat1:
                D1 = CLASS_MERSI2_L1()
                LutFile = ''
                D1.LutFile = LutFile
                D1.Load(inFile)
            elif 'AHI' == ICFG.sensor1:
                D1 = CLASS_HMW8_L1()
                D1.Load(inFile)
                geoFile = 'fygatNAV.Himawari08.xxxxxxx.000001_minmin.hdf'
                D1.Loadgeo(geoFile)
            else:
                print 'sensor1:%s not supported' % ICFG.sensor1

            ########## 04 Projection: build the lookup table ##########
            P1 = prj_core(ICFG.cmd, ICFG.res, row=ICFG.row, col=ICFG.col)
            P1.create_lut(D1.Lons, D1.Lats)

            ########## 05 Parse the L1 data of the second sensor ##########
            for inFile2 in ICFG.ifile2:
                if 'MODIS' == ICFG.sensor2:
                    D2 = CLASS_MODIS_L1()
                    D2.Load(inFile2)

                elif 'IASI' == ICFG.sensor2:
                    D2 = CLASS_IASI_L1(MCFG.chan1)
                    D2.Load(inFile2)
                    D2.get_rad_tbb(D1, MCFG.chan1)

                elif 'GOME' == ICFG.sensor2:
                    D2 = CLASS_GOME_L1(MCFG.chan1)
                    D2.Load(inFile2)
                    D2.get_rad_tbb(D1, MCFG.chan1)

                elif 'VIIRS' == ICFG.sensor2:
                    D2 = CLASS_VIIRS_L1()
                    D2.Load(inFile2)

                elif 'CRIS' == ICFG.sensor2:
                    if 'SCRIF' in inFile2:
                        print 'full-resolution CrIS'
                        D2 = CLASS_CRIS_L1(MCFG.chan1)
                        D2.LoadFull(inFile2)
                        D2.gapFilling()
                        D2.get_rad_tbb(D1, MCFG.chan1)
                    else:
                        print 'low-resolution CrIS'
                        D2 = CLASS_CRIS_L1(MCFG.chan1)
                        D2.Load(inFile2)
                        D2.get_rad_tbb(D1, MCFG.chan1)

                else:
                    print 'sensor2:%s not supported' % ICFG.sensor2

                ########## 06 Projection: build the lookup table ##########
                P2 = prj_core(ICFG.cmd, ICFG.res, row=ICFG.row, col=ICFG.col)
                P2.create_lut(D2.Lons, D2.Lats)
                ########## 07 Rough collocation ##########
                DCLC.save_rough_data(P1, P2, D1, D2, MCFG)

        T2 = datetime.now()
        print 'rough:', (T2 - T1).total_seconds()
    else:
        T1 = datetime.now()
        DCLC.reload_data(ICFG, MCFG)
        T2 = datetime.now()
        print 'reload:', (T2 - T1).total_seconds()
    ########## 08 Fine collocation and correction of visible-channel reflectance ##########
    T1 = datetime.now()
    DCLC.save_fine_data(MCFG)
    DCLC.correct_target_ref_data()  # last step, must come last
    T2 = datetime.now()
    print 'colloc:', (T2 - T1).total_seconds()

    ########## 09 Write the collocation results ##########
    if rewrite_mask:
        T1 = datetime.now()
        DCLC.rewrite_hdf5(ICFG, MCFG)
        T2 = datetime.now()
        print 'rewrite:', (T2 - T1).total_seconds()
    elif MCFG.rewrite:
        T1 = datetime.now()
        DCLC.write_hdf5(ICFG, MCFG)
        T2 = datetime.now()
        print 'write:', (T2 - T1).total_seconds()

    ########## 10 Plot the results ##########
    if MCFG.drawmap:
        T1 = datetime.now()
        DCLC.draw_dclc(ICFG, MCFG)
        T2 = datetime.now()
        print 'map:', (T2 - T1).total_seconds()
Example #6
    def draw(self, in_file, proj_file, dataset_name, vmin=None, vmax=None):
        if self.error:
            return
        # Load the projection lookup data
        if os.path.isfile(proj_file):
            try:
                with h5py.File(proj_file, 'r') as h5:
                    lut_ii = h5.get("lut_ii")[:]
                    lut_jj = h5.get("lut_jj")[:]
                    data_ii = h5.get("data_ii")[:]
                    data_jj = h5.get("data_jj")[:]
            except Exception as why:
                print why
                print "Can't open file: {}".format(proj_file)
                return
        else:
            print "File does not exist: {}".format(proj_file)
            return

        with time_block("Draw load", switch=TIME_TEST):
            # Load the product data
            if os.path.isfile(in_file):
                try:
                    with h5py.File(in_file, 'r') as h5:
                        proj_value = h5.get(dataset_name)[:][data_ii, data_jj]
                except Exception as why:
                    print why
                    print "Can't open file: {}".format(in_file)
                    return
            else:
                print "File does not exist: {}".format(in_file)
                return

        if vmin is not None:
            vmin = vmin
        if vmax is not None:
            vmax = vmax

        p = dv_map.dv_map()
        p.title = "{}    {}".format(dataset_name, self.ymd)

        # Add province boundaries
        #       p1.show_china_province = True
        p.delat = 30
        p.delon = 30
        p.show_line_of_latlon = False
        #         p.colormap = 'gist_rainbow'
        #         p.colormap = 'viridis'
        #         p.colormap = 'brg'

        # Create the lookup table
        lookup_table = prj_core(
            self.cmd, self.res, unit="deg", row=self.row, col=self.col)
        lookup_table.grid_lonslats()
        lons = lookup_table.lons
        lats = lookup_table.lats

        # Build the full projected data array
        value = np.full((self.row, self.col), self.fill_value, dtype='f4')

        value[lut_ii, lut_jj] = proj_value
        value = np.ma.masked_less_equal(value, 0)  # mask values <= 0

        # Apply the data scale factor (0.001 for ocean-colour products)
        slope = 0.001
        value = value * slope

        p.easyplot(lats, lons, value, ptype=None, vmin=vmin,
                   vmax=vmax, markersize=0.1, marker='o')

        out_png_path = os.path.dirname(in_file)
        out_png = os.path.join(out_png_path, '{}.png'.format(dataset_name))
        pb_io.make_sure_path_exists(os.path.dirname(out_png))
        p.savefig(out_png, dpi=300)
Example #7
    def combine(self):
        if self.error:
            return

        # If the output file already exists, skip it
        elif os.path.isfile(self.ofile):
            self.error = True
            print "File is already exist, skip it: {}".format(self.ofile)
            return
        # Composite the daily data
        elif pb_io.is_none(self.ifile, self.pfile, self.ofile):
            self.error = True
            print "Is None: ifile or pfile or ofile: {}".format(self.yaml_file)
            return
        elif len(self.ifile) < 1:
            self.error = True
            print "File count lower than 1: {}".format(self.yaml_file)
            return

        fill_value = -32767
        for file_idx, in_file in enumerate(self.ifile):
            proj_file = self.pfile[file_idx]
            if os.path.isfile(in_file) and os.path.isfile(proj_file):
                print "*" * 100
                print "Start combining file:"
                print "<<< {}\n<<< {}".format(in_file, proj_file)
            else:
                print "File is not exist: {} OR {}".format(in_file, proj_file)
                continue

            # Load the projection data
            self.load_proj_data(proj_file)
            # Daily composite
            with time_block("One combine time:", switch=TIME_TEST):
                try:
                    with h5py.File(in_file, 'r') as h5:
                        for k in h5.keys():
                            # Record the attribute information
                            if k not in self.attrs:
                                self.attrs[k] = pb_io.attrs2dict(
                                    h5.get(k).attrs)

                            if k == "Longitude" or k == "Latitude":
                                continue
                            elif k not in self.out_data:
                                if k == "Ocean_Flag":
                                    self.out_data[k] = np.full(
                                        (self.row, self.col),
                                        fill_value,
                                        dtype='i4')
                                else:
                                    self.out_data[k] = np.full(
                                        (self.row, self.col),
                                        fill_value,
                                        dtype='i2')
                            # Merge one dataset
                            proj_data = h5.get(k)[:]
                            self.out_data[k][self.lut_ii,
                                             self.lut_jj] = proj_data[
                                                 self.data_ii, self.data_jj]

                except Exception as why:
                    print why
                    print "Can't combine file, some error exist: {}".format(
                        in_file)

        with time_block("Grid to lons and lats time:", switch=TIME_TEST):
            if "Longitude" not in self.out_data:
                lookup_table = prj_core(self.cmd,
                                        self.res,
                                        unit="deg",
                                        row=self.row,
                                        col=self.col)
                lookup_table.grid_lonslats()
                self.out_data["Longitude"] = lookup_table.lons
                self.out_data["Latitude"] = lookup_table.lats

        # Print the number of valid values in each output dataset
        keys = [x for x in self.out_data]
        keys.sort()
        for k in keys:
            if self.out_data[k] is None:
                print k
                continue
            idx = np.where(self.out_data[k] > 0)
            print "{:30} : {}".format(k, len(idx[0]))
Example #8
    def combine(self):
        all_time_start = datetime.utcnow()
        if self.error:
            return
        # If the output file already exists, skip it
        elif os.path.isfile(self.ofile):
            self.error = True
            print "File is already exist, skip it: {}".format(self.ofile)
            return
        # Composite the daily data
        elif pb_io.is_none(self.ifile, self.pfile, self.ofile):
            self.error = True
            print "Is None: ifile or pfile or ofile: {}".format(self.yaml_file)
            return
        elif len(self.ifile) < 1:
            self.error = True
            print "File count lower than 1: {}".format(self.yaml_file)
            return

        self.load_proj_files(self.pfile)  # load the projection files

        fill_value = -32767

        with h5py.File(self.ifile[0], 'r') as h5:
            data_names = h5.keys()
            data_names.sort()

            for k in h5.keys():
                # Record the attribute information
                if k not in self.attrs:
                    self.attrs[k] = pb_io.attrs2dict(h5.get(k).attrs)

        file_count = len(self.ifile)
        data_count = list()
        date_start = datetime.utcnow()
        for data_name in data_names:
            if data_name == "Longitude" or data_name == "Latitude" or data_name == "Ocean_Flag":
                continue
            if 'Azimuth' in data_name or 'Zenith' in data_name or data_name == 'chl':
                continue

            data = list()
            count = 0
            # Read all the data
            for in_file in self.ifile:
                filename = os.path.basename(in_file)
                k = filename.split('_1000M')[0]
                data_ii, data_jj = self.data_ij[k]
                with h5py.File(in_file, 'r') as hdf5:
                    data_read = hdf5.get(data_name)[:][data_ii, data_jj]
                    data = np.append(data, data_read)
                date_end = datetime.utcnow() - date_start
                count += 1
                print 'Combine {}  {}/{} {}'.format(data_name, count,
                                                    file_count, date_end)

            condition = np.logical_and(data != 32767, data != -32767)
            condition = np.logical_and(condition, data != 0)
            lut_ii = self.lut_ii[condition]
            lut_jj = self.lut_jj[condition]
            data = data[condition]

            data_count.append((data_name, len(data)))

            if not len(data) == 0:
                ijd = dict()
                date_start = datetime.utcnow()
                for i, j, d in zip(lut_ii, lut_jj, data):
                    local = (i, j)
                    if local not in ijd:
                        ijd[local] = [d]
                    else:
                        data_list = ijd[local]
                        data_list.append(d)
                date_end = datetime.utcnow() - date_start
                print date_end

                local = ijd.keys()
                local = np.array(local)
                lut_ii = local[:, 0].astype(np.int16)
                lut_jj = local[:, 1].astype(np.int16)

                data = ijd.values()

                date_start = datetime.utcnow()
                mean_ = np.vectorize(mean_data)
                data = mean_(data)
                date_end = datetime.utcnow() - date_start
                print date_end

                out_data = np.full((self.row, self.col),
                                   fill_value,
                                   dtype='i2')
                data = data.reshape(-1)
                out_data[lut_ii, lut_jj] = data
            else:
                out_data = np.full((self.row, self.col),
                                   fill_value,
                                   dtype='i2')
            self.write(out_data, data_name, self.ofile)

        # Write the longitude/latitude datasets
        lookup_table = prj_core(self.cmd,
                                self.res,
                                unit="deg",
                                row=self.row,
                                col=self.col)
        lookup_table.grid_lonslats()
        longitude = lookup_table.lons
        latitude = lookup_table.lats
        data_name = "Longitude"
        self.write(longitude, data_name, self.ofile)
        data_name = "Latitude"
        self.write(latitude, data_name, self.ofile)

        # Write the land/sea flag and the angle datasets
        for data_name in [
                'SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 'SolarZenith',
                'Ocean_Flag', 'chl'
        ]:
            out_data = np.full((self.row, self.col), fill_value, dtype='i4')
            data = list()
            for in_file in self.ifile:
                filename = os.path.basename(in_file)
                k = filename.split('_1000M')[0]
                data_ii, data_jj = self.data_ij[k]
                with h5py.File(in_file, 'r') as hdf5:
                    data_read = hdf5.get(data_name)[:][data_ii, data_jj]
                    data = np.append(data, data_read)
            data = np.reshape(data, -1)
            lut_ii = self.lut_ii.astype(np.int16)
            lut_jj = self.lut_jj.astype(np.int16)
            out_data[lut_ii, lut_jj] = data
            self.write(out_data, data_name, self.ofile)

        # Print the number of valid values in each output dataset
        for count in data_count:
            print "{:30} : {}".format(count[0], count[1])

        all_time_end = datetime.utcnow() - all_time_start
        print all_time_end
Example #9
def main(inYamlFile):
    T1 = datetime.now()

    # 01 ICFG = input configuration class ##########
    ICFG = ReadYaml(inYamlFile)

    # 02 MCFG = threshold configuration class
    modeFile = os.path.join(
        MainPath, 'cfg', '%s+%s_%s+%s_L3.colloc' %
        (ICFG.sat1, ICFG.sensor1, ICFG.sat2, ICFG.sensor2))
    MCFG = ReadModeYaml(modeFile)
    if not MCFG.rewrite:
        print 'skip'
        return

    P1 = prj_core(ICFG.cmd,
                  ICFG.res,
                  unit='deg',
                  pt_tl=(-179.5, 89.5),
                  pt_br=(179.5, -89.5))
    P2 = prj_core(ICFG.cmd,
                  ICFG.res,
                  unit='deg',
                  pt_tl=(-179.5, 89.5),
                  pt_br=(179.5, -89.5))
    # DCLC = collocation result class
    row, col = P1.lons.shape
    DCLC = COLLOC_COMM(row, col, MCFG.chan1)

    T2 = datetime.now()
    print 'read config:', (T2 - T1).total_seconds()

    # Decide whether to rewrite
    if MCFG.rewrite:
        rewrite_mask = True
    else:
        if os.path.isfile(ICFG.ofile):
            rewrite_mask = False

    if rewrite_mask:
        T1 = datetime.now()
        # 03 Parse the L1 data of the first sensor ##########
        for inFile in ICFG.ifile1:
            print 'L3'
            D1 = CLASS_MERSI_L3()
            D1.Load(inFile)

            # 04 Projection: build the lookup table ##########
            print ICFG.cmd

            P1.create_lut(D1.Lons, D1.Lats)
            # 05 Parse the L1 data of the second sensor ##########

            for inFile2 in ICFG.ifile2:
                D2 = CLASS_MODIS_L3()
                D2.Load(inFile2)

                # 06 Projection: build the lookup table ##########
                P2.create_lut(D2.Lons, D2.Lats)
                # 07 Rough collocation ##########
                DCLC.save_rough_data(P1, P2, D1, D2, MCFG)

        T2 = datetime.now()
        print 'rough:', (T2 - T1).total_seconds()
        # 08 Fine collocation and correction of visible-channel reflectance ##########
        T1 = datetime.now()
        DCLC.save_fine_data(MCFG)
        T2 = datetime.now()
        print 'colloc:', (T2 - T1).total_seconds()

        # 09 Write the collocation results ##########
        T1 = datetime.now()
        DCLC.write_hdf5(ICFG, MCFG)
        T2 = datetime.now()
        print 'write:', (T2 - T1).total_seconds()