Example #1
import numpy as np
from pyhdf.SD import SD, SDC


def w(ofile, data, sds_base_name, nbands, xrange, yrange, units, rowdimname,
      coldimname):
    """Create HDF file ofile and write data to it.

    Usage: hdf_utils.w(ofile, 'sds_base_name', nbands, xdim, ydim, 'units',
                       'rowdimname', 'coldimname')
    """
    # Dr. M. Disney, Sep 2011

    # create and write hdf file via SD
    dataopf = SD(ofile, SDC.WRITE | SDC.CREATE | SDC.TRUNC)
    for i in range(nbands):
        sds = dataopf.create(sds_base_name + str(i), SDC.FLOAT32,
                             (xrange, yrange))
        sds.name = str(i)
        sds.units = units
        sds.setfillvalue(0)
        dim1 = sds.dim(0)
        dim1.setname(rowdimname)
        dim2 = sds.dim(1)
        dim2.setname(coldimname)
        if nbands > 1:
            sds[:] = np.float32(data[i])
        else:
            sds[:] = np.float32(data)
        sds.endaccess()  # close each SDS once written

    dataopf.end()
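
A minimal usage sketch (the file name, units, and random band stack are illustrative assumptions, not from the original source):

import numpy as np

# Hypothetical call: write a 3-band 100x200 float stack to bands.hdf
data = np.random.rand(3, 100, 200)
w('bands.hdf', data, 'band_', 3, 100, 200, 'W/m^2/sr', 'row', 'col')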
Example #2
from sys import stderr

from pyhdf.SD import SD, SDC


def write_interpolated(filename, f0, f1, fact, datasets):
    '''
    interpolate two hdf files f0 and f1 using factor fact, and
    write the result to filename
    '''

    hdf = SD(filename, SDC.WRITE | SDC.CREATE)
    for dataset in datasets:

        try:
            info = SD(f0).select(dataset).info()
        except Exception:
            print('Error loading %s in %s' % (dataset, f0), file=stderr)
            raise

        typ  = info[3]
        shp  = info[2]
        met0 = SD(f0).select(dataset).get()
        met1 = SD(f1).select(dataset).get()

        interp = (1-fact)*met0 + fact*met1

        interp = interp.astype({
                SDC.INT16: 'int16',
                SDC.FLOAT32: 'float32',
                SDC.FLOAT64: 'float64',
            }[typ])

        # write
        sds = hdf.create(dataset, typ, shp)
        sds[:] = interp[:]
        sds.endaccess()

    hdf.end()
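
A hypothetical invocation (file names, factor, and dataset list are assumptions for illustration):

# Blend two daily met files 25% of the way from day 0 toward day 1
write_interpolated('met_interp.hdf', 'met_day0.hdf', 'met_day1.hdf',
                   0.25, ['Temperature', 'Pressure'])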
Example #3
def _create_fake_dem_file(dem_fn, var_name, fill_value):
    import numpy as np
    from pyhdf.SD import SD, SDC
    h = SD(dem_fn, SDC.WRITE | SDC.CREATE)
    dem_var = h.create(var_name, SDC.INT16, (10, 10))
    dem_var[:] = np.zeros((10, 10), dtype=np.int16)
    if fill_value is not None:
        dem_var.setfillvalue(fill_value)
    h.end()
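
A read-back sanity check for the fake file (a sketch; the file and variable names are whatever was passed to the helper):

from pyhdf.SD import SD, SDC

h = SD('fake_dem.hdf', SDC.READ)   # assumes _create_fake_dem_file('fake_dem.hdf', 'height', -9999) ran first
var = h.select('height')
print(var.getfillvalue())          # -> -9999
print(var[:].shape)                # -> (10, 10)
h.end()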
Example #4
def merge_hdf(input_file, temp_dir, file_basename):
    """
    合并h5文件 以便后续做地理校正
    @param input_file: 输入svi01数据路径
    @param temp_dir: 临时文件夹
    @return: hdf_path: 合并后的hdf文件路径
    """
    # 输出合并结果hdf文件
    merge_hdf_path = os.path.join(temp_dir, file_basename + '_merge.hdf')
    merge_info = {"svi01": (input_file, 'All_Data/VIIRS-I1-SDR_All/Radiance'),
                  "svi02": (input_file.replace("svi01.h5", "svi02.h5"), 'All_Data/VIIRS-I2-SDR_All/Radiance'),
                  "svi03": (input_file.replace("svi01.h5", "svi03.h5"), 'All_Data/VIIRS-I3-SDR_All/Radiance'),
                  "svi04": (input_file.replace("svi01.h5", "svi04.h5"), 'All_Data/VIIRS-I4-SDR_All/Radiance'),
                  "SatelliteAzimuthAngle": (
                  input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/SatelliteAzimuthAngle'),
                  "SatelliteZenithAngle": (
                  input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/SatelliteZenithAngle'),
                  "SolarAzimuthAngle": (
                  input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/SolarAzimuthAngle'),
                  "SolarZenithAngle": (
                  input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/SolarZenithAngle'),
                  "Latitude": (input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/Latitude'),
                  "Longitude": (input_file.replace("svi01.h5", "gimgo.h5"), 'All_Data/VIIRS-IMG-GEO_All/Longitude')}

    # Create the output HDF object
    merge_sd = SD(merge_hdf_path, SDC.CREATE | SDC.WRITE)
    merge_list = ['svi01', 'svi02', 'svi03', 'svi04', 'SatelliteAzimuthAngle', 'SatelliteZenithAngle',
                  'SolarAzimuthAngle', 'SolarZenithAngle', 'Latitude', 'Longitude']
    for name in merge_list:
        cur_file_path = merge_info[name][0]
        cur_h5_file = h5py.File(cur_file_path, 'r')
        # Read the array
        cur_ds = cur_h5_file[merge_info[name][1]]
        cur_data = np.array(cur_ds)
        cur_h5_file.close()
        # Create a dataset in the HDF file: create(name, type, shape)
        if name in ['Latitude', 'Longitude', 'SatelliteAzimuthAngle', 'SatelliteZenithAngle',
                    'SolarAzimuthAngle', 'SolarZenithAngle']:
            cur_sd_obj = merge_sd.create(name, SDC.FLOAT32, (cur_data.shape[0], cur_data.shape[1]))
        else:
            cur_sd_obj = merge_sd.create(name, SDC.UINT16, (cur_data.shape[0], cur_data.shape[1]))
        # Write the numpy array into the dataset
        cur_sd_obj.set(cur_data)
        cur_sd_obj.endaccess()
    merge_sd.end()
    return merge_hdf_path
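
A hypothetical call; the sibling svi02-04 and gimgo granules are located by replacing "svi01.h5" in the input path, so only the svi01 path is passed:

merged = merge_hdf('/data/viirs/npp_d20240101_svi01.h5', '/tmp', 'npp_d20240101')
print(merged)  # -> /tmp/npp_d20240101_merge.hdf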
Example #5
    def setUp(self):
        """Create a test HDF4 file"""
        import numpy as np
        from pyhdf.SD import SD, SDC
        h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC)
        data = np.arange(10. * 100, dtype=np.float32).reshape((10, 100))
        v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100))
        v1[:] = data
        v2 = h.create('ds1_i', SDC.INT16, (10, 100))
        v2[:] = data.astype(np.int16)

        # Add attributes
        h.test_attr_str = 'test_string'
        h.test_attr_int = 0
        h.test_attr_float = 1.2
        # h.test_attr_str_arr = np.array(b"test_string2")
        for d in [v1, v2]:
            d.test_attr_str = 'test_string'
            d.test_attr_int = 0
            d.test_attr_float = 1.2

        h.end()
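
The plain attribute assignments above become HDF attributes; a read-back sketch against the 'test.hdf' created by setUp:

from pyhdf.SD import SD, SDC

h = SD('test.hdf', SDC.READ)
print(h.attributes()['test_attr_str'])      # 'test_string'
sds = h.select('ds1_f')
print(sds.attributes()['test_attr_float'])  # 1.2
h.end()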
Example #7
    def _start_dem_mock(self, tmpdir, url):
        if not url:
            return
        rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve')
        rmock = rmock_obj.start()
        dem_fn = str(tmpdir.join(url))
        rmock.return_value = dem_fn
        from pyhdf.SD import SD, SDC

        h = SD(dem_fn, SDC.WRITE | SDC.CREATE)
        dem_var = h.create("averaged elevation", SDC.FLOAT32, (10, 10))
        dem_var.setfillvalue(-999.0)
        dem_var[:] = np.zeros((10, 10), dtype=np.float32)
        h.end()  # close the file so the data is flushed to disk
        return rmock_obj
Example #8
def create_test_data():
    """Create a fake MODIS 35 L2 HDF4 file with headers."""
    from datetime import datetime, timedelta

    base_dir, file_name = generate_file_name()
    h = SD(file_name, SDC.WRITE | SDC.CREATE)
    # Set hdf file attributes
    beginning_date = datetime.now()
    ending_date = beginning_date + timedelta(minutes=5)
    core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
                           "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
                           "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n"\
                           "NUM_VAL = 1\nVALUE = \"{}\"\n"\
                           "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n"\
                           "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
                           "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME".format(
                               beginning_date.strftime("%Y-%m-%d"),
                               beginning_date.strftime("%H:%M:%S.%f"),
                               ending_date.strftime("%Y-%m-%d"),
                               ending_date.strftime("%H:%M:%S.%f")
                           )
    struct_metadata_header = "GROUP=SwathStructure\n"\
                             "GROUP=SWATH_1\n"\
                             "GROUP=DimensionMap\n"\
                             "OBJECT=DimensionMap_2\n"\
                             "GeoDimension=\"Cell_Along_Swath_5km\"\n"\
                             "END_OBJECT=DimensionMap_2\n"\
                             "END_GROUP=DimensionMap\n"\
                             "END_GROUP=SWATH_1\n"\
                             "END_GROUP=SwathStructure\nEND"
    archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND"
    setattr(h, 'CoreMetadata.0', core_metadata_header)  # noqa
    setattr(h, 'StructMetadata.0', struct_metadata_header)  # noqa
    setattr(h, 'ArchiveMetadata.0', archive_metadata_header)  # noqa

    # Fill datasets
    for dataset in TEST_DATA:
        v = h.create(dataset, TEST_DATA[dataset]['type'],
                     TEST_DATA[dataset]['data'].shape)
        v[:] = TEST_DATA[dataset]['data']
        for dim_count, dimension_name in enumerate(TEST_DATA[dataset]['attrs']['dim_labels']):
            v.dim(dim_count).setname(dimension_name)
        v.setfillvalue(TEST_DATA[dataset]['fill_value'])
        v.scale_factor = TEST_DATA[dataset]['attrs'].get(
            'scale_factor', SCALE_FACTOR)
    h.end()
    return base_dir, file_name
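
The dotted global attribute names ('CoreMetadata.0' etc.) can be read back through attributes(); a sketch, assuming the TEST_DATA and generate_file_name context above:

from pyhdf.SD import SD, SDC

base_dir, file_name = create_test_data()
h = SD(file_name, SDC.READ)
print(h.attributes()['CoreMetadata.0'][:40])
h.end()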
Example #9
from pyhdf.SD import SD, SDC


def write_hdf(_fname, _lat, _lon, _data, _var_name, _long_name, _units):
    # Create file.
    d = SD(_fname, SDC.WRITE | SDC.CREATE)

    # Create lat.
    nlat = len(_lat)
    lat = d.create('lat', SDC.FLOAT64, nlat)
    d0 = lat.dim(0)
    d0.setname('YDim:EOSGRID')
    lat[:] = _lat
    setattr(lat, 'units', 'degrees_north')

    # Create lon
    nlon = len(_lon)
    lon = d.create('lon', SDC.FLOAT64, nlon)
    d1 = lon.dim(0)
    d1.setname('XDim:EOSGRID')
    lon[:] = _lon
    setattr(lon, 'units', 'degrees_east')

    # Create var.
    v = d.create(_var_name, SDC.FLOAT64, (nlat, nlon))
    v0 = v.dim(0)
    v1 = v.dim(1)
    v0.setname('YDim:EOSGRID')
    v1.setname('XDim:EOSGRID')
    v.setcompress(SDC.COMP_DEFLATE, 5)  # compression must be set before writing data
    v[:] = _data
    setattr(v, 'long_name', _long_name)
    setattr(v, 'units', _units)

    # Close datasets.
    v.endaccess()
    lon.endaccess()
    lat.endaccess()
    d.end()
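
A sketch of how this writer might be called (grid and values are invented):

import numpy as np

lat = np.linspace(-90.0, 90.0, 181)
lon = np.linspace(-180.0, 179.0, 360)
temp = 280.0 + np.random.rand(181, 360)
write_hdf('t2m.hdf', lat, lon, temp, 't2m', '2-metre temperature', 'K')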
Example #10
from pyhdf.SD import SD, SDC


def write_hdf(filename, dataset, data):
    '''
    write a dataset in hdf file
    '''

    hdf = SD(filename, SDC.WRITE | SDC.CREATE)

    typ = {
            'int16'   : SDC.INT16,
            'float32' : SDC.FLOAT32,
            'float64' : SDC.FLOAT64,
            }[data.dtype.name]

    sds = hdf.create(dataset, typ, data.shape)
    sds[:] = data[:]
    sds.endaccess()

    hdf.end()
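
A hypothetical call; the array's dtype name selects the HDF type through the lookup table:

import numpy as np

write_hdf('chl.hdf', 'chlorophyll', np.zeros((1080, 2160), dtype='float32'))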
Example #11
from sys import stderr

from pyhdf.SD import SD, SDC


def write_interpolated(filename, f0, f1, fact, datasetNames):
    '''
    interpolate two hdf files f0 and f1 using factor fact, and
    write the result to filename
    '''

    hdf = SD(filename, SDC.WRITE | SDC.CREATE)
    for datasetName in datasetNames:

        try:
            info = SD(f0).select(datasetName).info()
        except Exception:
            print('Error loading %s in %s' % (datasetName, f0), file=stderr)
            raise

        typ  = info[3]
        shp  = info[2]
        sds_in1 = SD(f0).select(datasetName)
        met0 = sds_in1.get()
        met1 = SD(f1).select(datasetName).get()

        interp = (1-fact)*met0 + fact*met1

        interp = interp.astype({
                SDC.INT16: 'int16',
                SDC.FLOAT32: 'float32',
                SDC.FLOAT64: 'float64',
            }[typ])

        # write
        sds = hdf.create(datasetName, typ, shp)
        sds[:] = interp[:]

        # copy attributes
        attr = sds_in1.attributes()
        if len(attr) > 0:
            for name in attr.keys():
                setattr(sds, name, attr[name])
        sds.endaccess()

    hdf.end()
Example #12
def WriteToHDF(outHDFFile, refResultArr, processStatus):

    hdfFile = SD(outHDFFile, SDC.WRITE | SDC.CREATE | SDC.TRUNC)

    # Assign a few attributes at the file level
    hdfFile.author = 'author'
    hdfFile.priority = 2
    bandT = 0

    for i in range(0, CF.Nbands):
        if (not processStatus[i]):
            continue

        #print refResultArr[::bandT].shape, refResultArr[::bandT].shape[0], refResultArr[::bandT].shape[1]

        # Create a dataset named 'd1' to hold a 3x3 float array.
        #d1 = hdfFile.create(CF.refBandNames[i], SDC.UINT16, (refResultArr.shape[0], refResultArr.shape[1]))
        d1 = hdfFile.create(CF.refBandNames[i], SDC.FLOAT32,
                            (refResultArr.shape[0], refResultArr.shape[1]))

        # Set some attributes on 'd1'
        d1.description = 'simple atmosphere correction for ' + CF.refBandNames[
            i]
        d1.units = 'Watts/m^2/micrometer/steradian'

        # Get handles to 'd1' dimensions (names/attributes could be set here)
        dim1 = d1.dim(0)
        dim2 = d1.dim(1)

        # Assign values to 'd1'
        d1[:] = refResultArr[:, :, bandT]

        #print refResultArr[:,:,bandT].shape

        bandT += 1

        d1.endaccess()

    hdfFile.end()
Example #13
import getopt
import sys

import numpy as np
from pyhdf.SD import SD, SDC


def main():
    indir = ''
    filelist = ''
    outfile = ''
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:f:o:",
                                   ["indir=", "filelist=", "outfile="])
    except getopt.GetoptError:
        print(sys.argv[0], ' -i <indir> -f <filelist> -o <outfile>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print(sys.argv[0], ' -i <indir> -f <filelist> -o <outfile>')
            sys.exit()
        elif opt in ("-i", "--indir"):
            indir = arg
        elif opt in ("-f", "--filelist"):
            filelist = arg
        elif opt in ("-o", "--outfile"):
            outfile = arg

    # Define some constants
    PFT_varname = 'Land_Cover_Type_1'

    # Parse filelist
    fileList = []
    fileList = filelist.lstrip().rstrip().split(',')
    nFiles = len(fileList)

    # Read files
    for k in range(nFiles):
        filename = indir + '/' + fileList[k]
        if (k == 0):
            print("reading file to get dimensions", filename)
            hdf = SD(filename, SDC.READ)
            nrows_modis = hdf.datasets()[PFT_varname][1][0]
            ncols_modis = hdf.datasets()[PFT_varname][1][1]
            hdf.end()
            data = np.full([nrows_modis, ncols_modis, nFiles],
                           255,
                           dtype='int16')
#      rowvals = np.full([nrows_modis,ncols_modis]),0,dtype='int32')
#      colvals = np.full([nrows_modis,ncols_modis]),0,dtype='int32')
#      for i in range(nrows_modis):
#        rowvals[i,:] = i
#      for j in range(ncols_modis):
#        colvals[:,j] = j
        print("reading file", filename)
        try:
            hdf = SD(filename, SDC.READ)
            tmpdata = hdf.select(PFT_varname)
            data[:, :, k] = tmpdata[:, :]
            hdf.end()
        except Exception:
            # repeat previous file's data (doesn't work if the first file has an error)
            data[:, :, k] = data[:, :, k - 1]

    flag = np.full([nrows_modis, ncols_modis], 255, dtype='int16')
    pft = np.full([nrows_modis, ncols_modis], 255, dtype='int16')
    for i in range(nrows_modis):
        print('processing row', i)
        for j in range(ncols_modis):
            (pft_tmp, flag_tmp) = decide_pft(data[i, j, :])
            pft[i, j] = pft_tmp
            flag[i, j] = flag_tmp


#      if (i < 10 and pft[i,j] != data[i,j,0]):
#        print(i,j,'data',data[i,j,:],'pft',pft[i,j],'flag',flag[i,j])

    hdf = SD(outfile, SDC.WRITE | SDC.CREATE)
    sds_pft = hdf.create(PFT_varname, SDC.INT16, (nrows_modis, ncols_modis))
    sds_flag = hdf.create('Data_Flag', SDC.INT16, (nrows_modis, ncols_modis))
    sds_pft.setfillvalue(255)
    sds_flag.setfillvalue(255)
    dim1 = sds_pft.dim(0)
    dim1.setname('row')
    dim2 = sds_pft.dim(1)
    dim2.setname('col')
    dim3 = sds_flag.dim(0)
    dim3.setname('row')
    dim4 = sds_flag.dim(1)
    dim4.setname('col')
    sds_pft[:, :] = pft[:, :]
    sds_flag[:, :] = flag[:, :]
    sds_pft.endaccess()
    sds_flag.endaccess()
    hdf.end()
Example #14
dl_data[(rad_sol <= -1)] = day1

day2 = rad_sol[(rad_sol < 1) & (rad_sol > -1)] 
day2 = 24.0 * np.arccos(day2)/np.pi
dl_data[(rad_sol < 1) & (rad_sol > -1)] = day2

dl_data[(rad_sol >= 1)] = 0.0

dl_data = dl_data.reshape(1080,2160)
print "day length complete"

####################################################################
#   Calculates NPP for each pixel by multiplying values for that   #
#   pixel from pbopt, zeu, tot_chl, par and dl together.           #
#   Produces output hdf file containing NPP data.                  #
####################################################################  

NPP_data = pbopt_data * zeu_data * chl_data * par_data * dl_data

OUTPUT1 = "C:\Anaconda\envs\sat_data\NPP_3.hdf"
NPP = SD(OUTPUT1, SDC.WRITE | SDC.CREATE)
sds = NPP.create("sds1", SDC.FLOAT64,(1080,2160))
sds.setfillvalue(0)
sds[:] = NPP_data
sds.endaccess()
NPP.end()
Example #15
def createHDF(allvars):
    varinfo = {
        0: 'nametimestamp',
        1: 'calipso fname',
        2: 'mod21km fname',
        3: 'mod03 fname',
        4: 'myd35 fnames',
        5: ['Latitude', {
            'units': 'deg',
            'valid_range': '-90.0...90.0'
        }],
        6: ['Longitude', {
            'units': 'deg',
            'valid_range': '-90.0...90.0'
        }],
        7: ['IGBP_Surface_Type', {
            'units': 'no units',
            'valid_range': '1... 18'
        }],
        8: ['DEM_Surface_Elevation', {
            'units': 'km'
        }],
        9: ['Layer_Top_Altitude', {
            'units': 'km',
            'fill_value': '-9999'
        }],
        10: ['Feature_Classification_Flags', {}],
        11: [
            'SCM_classification', {
                'valid_range': '0,1',
                'description': '1:Layered\n0:clear'
            }
        ],
        12: ['Clear_Layered_Mask', {
            'valid_range': '0...4',
            'description': '4:Invalid\n3:Cloud+Aerosol\n2:Aerosol\n1:Cloud\n0:Clear'
        }],
        13: ['Cloud_Mask', {}],
        14: ['Confusion_Matrix_SCM', {
            'valid_range': '-2,-1,1,2',
            'description': '2:True Layered\n1:True Clear\n-1:False Clear\n-2:False Layered'
        }],
        15: ['Confusion_Matrix_CM', {
            'valid_range': '-2,-1,1,2',
            'description': '2:True Layered\n1:True Clear\n-1:False Clear\n-2:False Layered'
        }],
        16: ['Number_Layers_Found', {}],
        17: ['SensorZenith', {
            'units': 'deg',
            'valid_range': '0.0...180.0'
        }],
        18: ['SensorAzimuth', {
            'units': 'deg',
            'valid_range': '0.0...180.0'
        }],
        19: ['Solar_Zenith_Angle', {
            'units': 'deg',
            'valid_range': '0.0...180.0'
        }],
        20: [
            'Solar_Azimuth_Angle', {
                'units': 'deg',
                'valid_range': '0.0...180.0'
            }
        ],
        21: ['Layer_Base_Altitude', {
            'units': 'km'
        }],
        22: ['Layer_Top_Pressure', {}],
        23: ['Midlayer_Pressure', {}],
        24: ['Layer_Base_Pressure', {}],
        25: ['Layer_Top_Temperature', {}],
        26: ['Layer_Centroid_Temperature', {}],
        27: ['Midlayer_Temperature', {}],
        28: ['Layer_Base_Temperature', {}],
        29: ['CAD_Score', {
            'fill_value': '-127'
        }],
        30: ['Initial_CAD_Score', {
            'fill_value': '-127'
        }],
        31: [
            'Profile_UTC_Time', {
                'units': 'no units',
                'valid_range': '60,426.0...261,231.0'
            }
        ],
        32: [
            'Snow_Ice_Surface_Type', {
                'units': 'no units',
                'valid_range': '0.0...255.0'
            }
        ],
        33: ['Scattering_Angle', {
            'units': 'deg',
            'valid_range': '0.0...180.0'
        }],
        34: ['Skill_Score', {}],
        35: ['Hit_Rate', {}]
    }

    data_types = {
        'float64': SDC.FLOAT64,
        'float32': SDC.FLOAT32,
        'int32': SDC.INT32,
        'uint32': SDC.UINT32,
        'int16': SDC.INT16,
        'uint16': SDC.UINT16,
        'int8': SDC.INT8,
        'uint8': SDC.UINT8,
        '<U11': SDC.UCHAR
    }

    # Build the HDF file name from the timestamp.
    filename = 'CALTRACK-333m_SCM_V1-1_' + allvars[0] + '.hdf'
    print(filename)
    path = 'E:\\Custom_HDFs\\'
    hdfFile = SD(path + filename, SDC.WRITE | SDC.CREATE)
    # Assign a few attributes at the file level
    hdfFile.File_Name = filename
    hdfFile.Timestamp = allvars[0]
    hdfFile.Fill_Value = -9999
    hdfFile.Description = 'SCM, VFM, MYD35 cloud classifications coincident with the CALIOP 333m track'
    fused = f'{allvars[1]}\n{allvars[2]}\n{allvars[3]}\n' + '\n'.join(
        str(s) for s in allvars[4])
    hdfFile.Files_Used = fused

    # Parameterize this
    print('SKILL SCORES:', allvars[34])
    hdfFile.Skill_Scores = f'MYD35:{allvars[34][0]}\nSCM:{allvars[34][1]}'
    hdfFile.Hit_Rates = f'MYD35:{allvars[35][0]}\nSCM:{allvars[35][1]}'
    hdfFile.Processing_Time = datetime.datetime.now().strftime(
        "%Y-%m-%d %H:%M:%S")

    for i in range(5, len(allvars) - 2):
        # Ensure we have a numpy array (plain lists are converted)
        arr = np.asarray(allvars[i])

        #Get data type from data_types dictionary
        data_type = data_types.get(str(arr.dtype))

        # 2-D arrays keep their second dimension; 1-D arrays get width 1
        x = arr.shape[1] if arr.ndim > 1 else 1
        v1 = hdfFile.create(varinfo.get(i)[0], data_type, (len(arr), x))

        #GZIP compression
        v1.setcompress(SDC.COMP_DEFLATE, value=1)

        # Set some attributes on 'v1'
        for key in varinfo.get(i)[1]:
            setattr(v1, key, varinfo.get(i)[1][key])

        v1[0:] = arr
        v1.endaccess()  # Close the dataset

    hdfFile.end()
Example #16
def main(file_ref,
         file_info,
         subrange,
         aerotype=1,
         altitude=0.01,
         visibility=15,
         path_out=None):
    '''
    @description: main routine
    @file_ref {str}: DN data file
    @file_info {str}: image information file (lat/lon, sensor geometry, etc.)
    @subrange {lon_min, lon_max, lat_min, lat_max}
    @return: None
    '''
    nbands = 4
    mask_value = 65533
    # Convert HDF to GeoTIFF
    print('reconstruction...')
    hdf_merge = file_ref.replace('.hdf', '_merge.hdf')
    if os.path.exists(hdf_merge):
        os.system('rm %s' % hdf_merge)
    file_sd_out = SD(hdf_merge, SDC.CREATE | SDC.WRITE)
    file_sd1 = SD(file_ref)
    file_sd2 = SD(file_info)
    obj = file_sd2.select('Longitude')
    lon = obj.get()
    obj = file_sd2.select('Latitude')
    lat = obj.get()
    fields = ['Radiance_I%d' % (i + 1) for i in range(nbands)]
    size = None
    for field in fields:
        obji = file_sd1.select(field)
        if size is None:
            size = obji[:].shape
        objo = file_sd_out.create(field, SDC.UINT16, size)
        objo.set(obji[:])
    obji = file_sd2.select('Longitude')
    objo = file_sd_out.create('Longitude', SDC.FLOAT32, size)
    objo.set(obji[:])
    obji = file_sd2.select('Latitude')
    objo = file_sd_out.create('Latitude', SDC.FLOAT32, size)
    objo.set(obji[:])
    obji.endaccess()
    objo.endaccess()
    file_sd_out.end()
    for i in range(nbands):
        ofile_tif = file_ref.replace('.hdf', '_band%s_reproj.tif' % str(i + 1))
        cmd = '%s -geoloc -t_srs EPSG:4326 -srcnodata %s HDF4_SDS:UNKNOWN:"%s":%s %s' % (
            global_config['path_gdalwarp'], mask_value, hdf_merge, str(i),
            ofile_tif)
        os.system(cmd)
    # Satellite geometry information
    cut_index = cut_data(subrange, lon, lat)
    # Solar zenith angle
    obj = file_sd2.select('SolarZenithAngle')
    data = obj.get()
    data_cut = data[cut_index[0]:cut_index[1], cut_index[2]:cut_index[3]]
    solz = np.mean(data_cut)
    # Solar azimuth angle
    obj = file_sd2.select('SolarAzimuthAngle')
    data = obj.get()
    data_cut = data[cut_index[0]:cut_index[1], cut_index[2]:cut_index[3]]
    sola = np.mean(data_cut)
    # Satellite zenith angle
    obj = file_sd2.select('SatelliteZenithAngle')
    data = obj.get()
    data_cut = data[cut_index[0]:cut_index[1], cut_index[2]:cut_index[3]]
    salz = np.mean(data_cut)
    # Satellite azimuth angle
    obj = file_sd2.select('SatelliteAzimuthAngle')
    data = obj.get()
    data_cut = data[cut_index[0]:cut_index[1], cut_index[2]:cut_index[3]]
    sala = np.mean(data_cut)
    center_lonlat = [(subrange[0] + subrange[1]) / 2,
                     (subrange[2] + subrange[3]) / 2]
    raster = gdal.Open(file_ref.replace('.hdf', '_band1_reproj.tif'))
    xsize = raster.RasterXSize
    ysize = raster.RasterYSize
    geo_trans = raster.GetGeoTransform()
    proj_ref = raster.GetProjectionRef()
    # Compute the crop extent
    target_lon_min = 116.28
    target_lon_max = 125.0
    target_lat_min = 30.0
    target_lat_max = 37.83
    colm_s = int(round((target_lon_min - geo_trans[0]) / geo_trans[1]))
    colm_e = int(round((target_lon_max - geo_trans[0]) / geo_trans[1]))
    line_s = int(round((target_lat_max - geo_trans[3]) / geo_trans[5]))
    line_e = int(round((target_lat_min - geo_trans[3]) / geo_trans[5]))
    if colm_s < 0:
        colm_s = 0
    if line_s < 0:
        line_s = 0
    if colm_e >= xsize:
        colm_e = xsize - 1
    if line_e >= ysize:
        line_e = ysize - 1
    x_1d = np.array([geo_trans[0] + i * geo_trans[1] for i in range(xsize)])
    y_1d = np.array([geo_trans[3] + i * geo_trans[5] for i in range(ysize)])
    xx, yy = np.meshgrid(x_1d, y_1d)
    xx_sub = xx[line_s:line_e, colm_s:colm_e]
    yy_sub = yy[line_s:line_e, colm_s:colm_e]
    # Information needed to name the output file
    date_str = os.path.split(file_ref)[1].split('.')[1]
    time_str = os.path.split(file_ref)[1].split('.')[2]
    date_str = date_teanslator.jd_to_cale(date_str[1:])
    year = int(date_str.split('.')[0])
    month = int(date_str.split('.')[1])
    day = int(date_str.split('.')[2])
    hour = int(time_str[0:2])
    minute = int(time_str[2:])
    date_str = '%d%02d%02d%02d%02d%02d' % (year, month, day, hour + 8, minute,
                                           0)
    nrow = os.path.split(file_ref)[1].split('.')[3]
    out_name = 'NPP_VIIRS_375_L2_%s_%s_00.tif' % (date_str, nrow)
    raster_fn_out = os.path.join(path_out, out_name)
    driver = gdal.GetDriverByName('GTiff')
    target_ds = driver.Create(raster_fn_out, xsize, ysize, nbands,
                              gdal.GDT_UInt16)
    target_ds.SetGeoTransform(geo_trans)
    target_ds.SetProjection(proj_ref)
    # Atmospheric correction
    for i in range(nbands):
        obj_name = 'Radiance_I' + str(i + 1)
        obj = file_sd1.select(obj_name)
        raster = gdal.Open(
            file_ref.replace('.hdf', '_band%s_reproj.tif' % str(i + 1)))
        data = raster.GetRasterBand(1).ReadAsArray()
        print('Resampling: Band %s' % (i + 1))
        data_sub = data[line_s:line_e, colm_s:colm_e]
        blank_key = data_sub == mask_value
        # Morphological gap handling with OpenCV connected components
        blank_key[:, 0] = 0
        blank_key[:, -1] = 0
        blank_key[0, :] = 0
        blank_key[-1, :] = 0
        labels_struct = cv2.connectedComponentsWithStats(blank_key.astype(
            np.uint8),
                                                         connectivity=4)
        for i_label in range(1, labels_struct[0]):
            if labels_struct[2][i_label][4] > 1e5:
                blank_key[labels_struct[1] == i_label] = 0
        lon_blank = xx_sub[blank_key]
        lat_blank = yy_sub[blank_key]
        valid_key = np.logical_not(blank_key)
        lon_valid = xx_sub[valid_key]
        lat_valid = yy_sub[valid_key]
        lonlat = np.vstack((lon_valid, lat_valid)).T
        data_valid = data_sub[valid_key]
        data_blank = griddata(lonlat,
                              data_valid, (lon_blank, lat_blank),
                              method='nearest')
        data_sub[blank_key] = data_blank
        data[line_s:line_e, colm_s:colm_e] = data_sub
        mask = data == mask_value
        print('I%d radiometric calibration and atmospheric correction ...' % (i + 1))
        info = obj.attributes()
        scale = info['Scale']
        offset = info['Offset']
        radi_cali = data.astype(float) * scale + offset
        date_str = os.path.split(file_ref)[1].split('.')[1]
        date_str = date_teanslator.jd_to_cale(date_str[1:])
        month = int(date_str.split('.')[1])
        day = int(date_str.split('.')[2])
        mtl_coef = {
            'altitude': altitude,
            'visibility': visibility,
            'aero_type': aerotype,
            'location': center_lonlat,
            'month': month,
            'day': day,
            'solz': solz,
            'sola': sola,
            'salz': salz,
            'sala': sala
        }
        atms_corr = arms_corr(radi_cali, mtl_coef, i + 161)
        # save
        data_tmp = (atms_corr * 10000).astype(np.int32)  # np.int was removed from NumPy
        data_tmp[mask] = mask_value
        target_ds.GetRasterBand(i + 1).WriteArray(data_tmp)
        band = target_ds.GetRasterBand(i + 1)
        band.SetNoDataValue(mask_value)
    target_ds = None
    file_sd1.end()
    file_sd2.end()
    # Remove intermediate files
    os.system('rm %s' % hdf_merge)
    for i in range(nbands):
        os.system(
            'rm %s' %
            (file_ref.replace('.hdf', '_band%s_reproj.tif' % str(i + 1))))
    return (raster_fn_out)
Example #17
def createhdf(filename, table=[]):
    hdf = SD(filename, SDC.CREATE | SDC.WRITE)
    for i in table:
        hdf.create(i, SDC.FLOAT32, 0)
    hdf.end()
    return True
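
A hypothetical call; note that a dimension length of 0 is SDC.UNLIMITED in HDF4, so each dataset is created with a single unlimited dimension:

createhdf('skeleton.hdf', ['sst', 'chlor_a', 'par'])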
Example #18
def RemoveErro_and_RadCal(Hdfname_in):
    '''
    Error removal and radiometric calibration.
    '''
    filedir = os.path.dirname(Hdfname_in)
    hdf = SD(Hdfname_in, SDC.READ)

    # Latitude/longitude layers
    Lat = hdf.select('Latitude').get()
    Lon = hdf.select('Longitude').get()
    # Drop rows with invalid geolocation (fill value -999)
    loc = np.where(Lat == -999)[0]  # row indices of the invalid lines (ndarray)
    # Arrays after deletion
    Lat_ = np.delete(Lat, loc, axis=0)
    Lon_ = np.delete(Lon, loc, axis=0)
    # Calibration info
    sds_obj = hdf.select('EV_250_RefSB')
    sds_info = sds_obj.attributes()
    scales = sds_info['radiance_scales']
    offsets = sds_info['radiance_offsets']
    # Aqua data layers, two bands
    Rad0 = hdf.select('EV_250_RefSB').get()[0, :, :]
    Rad1 = hdf.select('EV_250_RefSB').get()[1, :, :]
    
    # Radiometric calibration
    Rad0_cor = scales[0] * Rad0 + offsets[0]
    Rad1_cor = scales[1] * Rad1 + offsets[1]

    # For the EV_250_RefSB layer, the row indices to drop are 4x the lat/lon indices
    num_begin = 0
    for i in range(Lat.shape[0]):
        if -999 in Lat[i, :]:
            num_begin = num_begin + 1
            continue
        else:
            break
    num_end = 0
    for j in range(i, Lat.shape[0]):
        if -999 in Lat[j, :]:
            num_end = num_end + 1
    num_begin_ = range(0, num_begin * 4)
    num_end_ = range((Lat.shape[0] - num_end) * 4, Lat.shape[0] * 4)

    # After calibration: delete the invalid rows
    # Band 1
    Rad0_cor_ = np.delete(Rad0_cor, num_begin_, axis=0)
    Rad0_cor__ = np.delete(Rad0_cor_, [i - num_begin * 4 for i in num_end_], axis=0)
    # Band 2
    Rad1_cor_ = np.delete(Rad1_cor, num_begin_, axis=0)
    Rad1_cor__ = np.delete(Rad1_cor_, [i - num_begin * 4 for i in num_end_], axis=0)

    # Create a new HDF file and write the calibrated data
    fileout = os.path.join(filedir, 'Rad_Cal_and_RemoveErro_hdf')
    if not os.path.exists(fileout):
        os.makedirs(fileout)

    Remove_hdf_path = os.path.join(fileout, os.path.basename(Hdfname_in)[:-4] + 'RemoveErro.hdf')
    # Only write if the file does not already exist
    if not os.path.exists(Remove_hdf_path):
        # Create the output HDF object
        New_sd = SD(Remove_hdf_path, SDC.CREATE | SDC.WRITE)
        # Create the datasets
        cur_sd_obj = New_sd.create('EV_250_RefSB_b1', SDC.FLOAT64, (Rad0_cor__.shape[0], Rad0_cor__.shape[1]))
        cur_sd_obj.set(Rad0_cor__)
        cur_sd_obj = New_sd.create('EV_250_RefSB_b2', SDC.FLOAT64, (Rad1_cor__.shape[0], Rad1_cor__.shape[1]))
        cur_sd_obj.set(Rad1_cor__)
        cur_sd_obj = New_sd.create('Latitude', SDC.FLOAT32, (Lat_.shape[0], Lat_.shape[1]))
        cur_sd_obj.set(Lat_)
        cur_sd_obj = New_sd.create('Longitude', SDC.FLOAT32, (Lon_.shape[0], Lon_.shape[1]))
        cur_sd_obj.set(Lon_)

        cur_sd_obj.endaccess()
        New_sd.end()
    hdf.end()  # close the input file in either case
    return Remove_hdf_path
Example #20
class lfmstartup():
    """
    Class for creating initial input files for LFM code.
    Eventually this will include LFM, MFLFM.
    Right now only LFM is supported.
    """
    def __init__(self, fileName, dims, nspecies=1):
        """
        Create the HDF file
         Inputs:
         fileName - Name of file to create
         dims - (NI,NJ,NK) tuple of grid size
         nspecies - number of ion speices (default 1)
        """
        (self.ni, self.nj, self.nk) = dims
        self.fileName = fileName
        self.varNames = [
            'X_grid', 'Y_grid', 'Z_grid', 'rho_', 'vx_', 'vy_', 'vz_', 'c_',
            'bx_', 'by_', 'bz_', 'bi_', 'bj_', 'bk_', 'ei_', 'ej_', 'ek_',
            'ai_', 'aj_', 'ak_'
        ]
        if (nspecies > 1):
            for i in range(1, nspecies + 1):
                for var in ['rho_.', 'vx_.', 'vy_.', 'vz_.', 'c_.']:
                    self.varNames.append(var + str(i))

        self.varUnits = [
            'cm', 'cm', 'cm', 'g/cm^3', 'cm/s', 'cm/s', 'cm/s', 'cm/s',
            'gauss', 'gauss', 'gauss', 'gauss*cm^2', 'gauss*cm^2',
            'gauss*cm^2', 'cgs*cm', 'cgs*cm', 'cgs*cm', 'dummy', 'dummy',
            'dummy'
        ]

        if (nspecies > 1):
            for i in range(1, nspecies + 1):
                for var in ['g/cm^3', 'cm/s', 'cm/s', 'cm/s', 'cm/s']:
                    self.varUnits.append(var)

    def open(self, mjd=0.0, tzero=3000.0):
        """
        Open the HDF file and set the global attributes
        Inputs:
            MJD - Modified Julian Date - default 0.0
            tzero - Solar wind initialization time - default 3000.0
        """
        self.f = SD(self.fileName, mode=SDC.WRITE | SDC.CREATE)
        self.setGlobalAttr(mjd, tzero)
        self.initVar()

        return

    def setGlobalAttr(self, mjd, tzero):
        self.f.attr('mjd').set(SDC.FLOAT64, mjd)
        self.f.attr('time_step').set(SDC.INT32, 0)
        self.f.attr('time_8byte').set(SDC.FLOAT64, 0.)
        self.f.attr('time').set(SDC.FLOAT32, 0.)
        self.f.attr('tilt_angle').set(SDC.FLOAT32, 0.)
        self.f.attr('tzero').set(SDC.FLOAT32, tzero)
        self.f.attr('file_contents').set(SDC.CHAR, 'a')
        self.f.attr('dipole_moment').set(SDC.CHAR, 'b')
        self.f.attr('written_by').set(SDC.CHAR, 'Python initializer')

        return

    def initVar(self):

        sds_vars = {}
        for varName, varUnit in zip(self.varNames, self.varUnits):
            sds_vars[varName] = self.f.create(
                varName, SDC.FLOAT32, (self.nk + 1, self.nj + 1, self.ni + 1))
            sds_vars[varName].attr('ni').set(SDC.INT32, self.ni + 1)
            sds_vars[varName].attr('nj').set(SDC.INT32, self.nj + 1)
            sds_vars[varName].attr('nk').set(SDC.INT32, self.nk + 1)
            sds_vars[varName].attr('units').set(SDC.CHAR, varUnit)

            sds_vars[varName][:] = n.zeros((self.nk + 1, self.nj + 1, self.ni + 1),
                                           dtype='float32')

    def writeVar(self, varName, arr):
        """
        Writes Array to HDF File
        Inputs
          varName - Name of variable to add
          arr - 3d array to add to file
        """
        iend = arr.shape[2]
        jend = arr.shape[1]
        kend = arr.shape[0]
        self.f.select(varName)[:kend, :jend, :iend] = arr.astype('float32')

        return

    def close(self):
        self.f.end()

        return
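
A minimal driver sketch (grid size and field values are invented; note the (NK+1, NJ+1, NI+1) array ordering used by initVar/writeVar):

import numpy as np

s = lfmstartup('lfm_init.hdf', (53, 24, 32))  # hypothetical (NI, NJ, NK)
s.open(mjd=58849.0)                           # creates file, attributes, zeroed variables
rho = np.ones((33, 25, 54), dtype='float32')  # shape (NK+1, NJ+1, NI+1)
s.writeVar('rho_', rho)
s.close()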
Example #21
out.set_verticalalignment('bottom')
out.set_rotation(270)
ch30_calibrated.shape

# # Write the calibrated channel out for safekeeping
#
# Follow the example here: https://hdfeos.org/software/pyhdf.php

# In[22]:

# Create an HDF file
outname = "ch30_out.hdf"
sd = SD(outname, SDC.WRITE | SDC.CREATE)

# Create a dataset
sds = sd.create("ch30", SDC.FLOAT64, ch30_calibrated.shape)

# Fill the dataset with a fill value
sds.setfillvalue(0)

# Set dimension names
dim1 = sds.dim(0)
dim1.setname("row")
dim2 = sds.dim(1)
dim2.setname("col")

# Assign an attribute to the dataset
sds.units = "W/m^2/micron/sr"

# Write data
sds[:, :] = ch30_calibrated

# Flush to disk by closing the dataset and file
sds.endaccess()
sd.end()
Example #22
parser.add_argument('-d',
                    '--debug',
                    help='Debug logging.',
                    action='store_true')
args = parser.parse_args()

SNOW_LAYER = args.layer
SNOW_SDC = SDC.UINT8

if args.debug:
    logging.basicConfig(level=logging.DEBUG)
elif args.verbose:
    logging.basicConfig(level=logging.INFO)

sd = SD(args.file, SDC.CREATE | SDC.WRITE)
snow = sd.select(args.layer)
masked_snow = np.ma.masked_equal(snow.get(), snow.getfillvalue())
masked_snow._sharedmask = False
lib.convert_snow_to_binary(masked_snow)
data = lib.upsample_snow(masked_snow, lib.masked_binary_logic)
logging.debug("Snow shape: " + str(data.shape))
logging.debug("Snow data: " + str(data))
logging.debug("Unique values: " + str(np.unique(data)))
sds_data = sd.create("upsampled_" + args.layer, SNOW_SDC, (1200, 1200))
sds_data.setfillvalue(snow.getfillvalue())
logging.info("Writing upsampled data from: " + args.file)
sds_data[:] = data
logging.debug("Wrote data with values: " + str(np.unique(sds_data)))
sds_data.endaccess()
sd.end()
Example #23
    logging.info("Design Matrix shape: " + str(dm.shape))
    logging.debug(str(dm))
    return dm


lst_matrix, lst_day_of_year = build_predictor_matrix(
    args.lst_files, args.first_year, args.last_year, args.t0, args.delta,
    args.eta, LST_LAYER, lib.LST_NO_DATA)
snow_matrix, snow_day_of_year = build_predictor_matrix(
    args.snow_files, args.first_year, args.last_year, args.t0, args.delta,
    args.eta, SNOW_LAYER, lib.FILL_SNOW)
ndvi_matrix = build_ndvi_matrix(args.ndvi_files, args.first_year,
                                args.last_year, NDVI_START, NDVI_END)
design_matrix = build_design_matrix(lst_matrix, snow_matrix, ndvi_matrix)
sd = SD(args.out_file, SDC.WRITE | SDC.CREATE)
sds = sd.create("design_matrix", SDC.FLOAT64, design_matrix.shape)
sds.first_year = args.first_year
sds.last_year = args.last_year
sds.t0 = args.t0
sds.delta = args.delta
sds.eta = args.eta
sds.missing_ratio = args.missing_ratio
sds.snow_mean = args.snow_mean
if args.remove_lst_columns:
    sds.removed_lst_columns = ",".join(str(x) for x in args.remove_lst_columns)
if args.remove_snow_columns:
    sds.removed_snow_columns = ",".join(
        str(x) for x in args.remove_snow_columns)
sds.lst_days = ",".join(str(x) for x in lst_day_of_year)
sds.snow_days = ",".join(str(x) for x in snow_day_of_year)
sds[:] = design_matrix
sds.endaccess()
sd.end()
Example #24
Temp = ax.imshow(ch31_temp_rot)
cax = fig.colorbar(Temp)

ax.set_title('Channel 31 Brightness temperature ')
out = cax.ax.set_ylabel('Temperature Kelvin')
out.set_verticalalignment('bottom')
out.set_rotation(270)

print(ch31_temp_rot.shape)

# Create an HDF file
outname = "F:/0nti_modis/ch31_out.hdf"
sd = SD(outname, SDC.WRITE | SDC.CREATE)

# Create a dataset
sds = sd.create("ch31", SDC.FLOAT64, ch31_temp_rot.shape)

# Fill the dataset with a fill value
sds.setfillvalue(0)

# Set dimension names
dim1 = sds.dim(0)
dim1.setname("col")
dim2 = sds.dim(1)
dim2.setname("row")

# Assign an attribute to the dataset
sds.units = 'K'

# Write data
sds[:, :] = ch31_temp_rot

# Flush to disk by closing the dataset and file
sds.endaccess()
sd.end()
Example #25
def main(ifile,
         shp_file,
         center_lonlat,
         cut_range=None,
         aerotype=1,
         altitude=0.01,
         visibility=15,
         band_need=['all'],
         path_out=None):
    nbands = 21
    # Unzip the archive
    print('Extracting archive...')
    fz = zipfile.ZipFile(ifile, 'r')
    for file in fz.namelist():
        fz.extract(file, os.path.split(ifile)[0])
    path_name = os.path.split(ifile)[1]
    path_name = path_name.replace('.zip', '.SEN3')
    file_path = os.path.join(os.path.split(ifile)[0], path_name)
    file_list = os.listdir(file_path)
    print('Preprocessing data...')
    # Get the acquisition date/time from the file name
    path_name = os.path.split(file_path)[1]
    if path_name == '':
        print('Bad directory: %s' % path_name)
        exit(0)
    else:
        date_str = re.findall(r'\d+', path_name)[2]
        time_str = re.findall(r'\d+', path_name)[3]
        year = int(date_str[0:4])
        month = int(date_str[4:6])
        day = int(date_str[6:8])
        hour = int(time_str[0:2])
        minute = int(time_str[2:4])
        date = '%d/%d/%d %d:%d:00' % (year, month, day, hour, minute)
        sola_position = calc_sola_position.main(center_lonlat[0],
                                                center_lonlat[1], date)
        solz = sola_position[0]
        sola = sola_position[1]

    if 'tie_geometries.nc' in file_list:
        f_geometries = h5py.File(os.path.join(file_path, 'tie_geometries.nc'),
                                 'r')
        sala = f_geometries['OAA']
        salz = f_geometries['OZA']
        sala_all = sala[:, :]
        salz_all = salz[:, :]
        f_geometries.close()
    if 'geo_coordinates.nc' in file_list:
        f_coordinates = h5py.File(
            os.path.join(file_path, 'geo_coordinates.nc'), 'r')
        lon = f_coordinates['longitude'][:]
        lat = f_coordinates['latitude'][:]
        lon = lon.astype(float) * 1e-6
        lat = lat.astype(float) * 1e-6
        x_1d = np.linspace(0, lon.shape[1] - 1, lon.shape[1])
        y_1d = np.linspace(0, lon.shape[0] - 1, lon.shape[0])
        [xx, yy] = np.meshgrid(x_1d, y_1d)
        distance = ((center_lonlat[0] - lon)**2 +
                    (center_lonlat[1] - lat)**2)**0.5 * 111
        location_x = np.mean(xx[distance < 1])
        location_y = int(np.mean(yy[distance < 1]))
        location_x = int(np.mean((location_x / lon.shape[1]) * 77))
        sala = sala_all[location_y, location_x] * 1e-6
        if sala < 0:
            sala = 360 + sala
        salz = salz_all[location_y, location_x] * 1e-6
        f_coordinates.close()
        size_x = lon.shape[1]
        size_y = lon.shape[0]
        ul_x = 0
        ul_y = 0
        lr_x = size_x
        lr_y = size_y
    else:
        print('Missing required file')
        return (0)

    size_x = np.shape(lon)[1]
    size_y = np.shape(lat)[0]
    rrs_join = None
    # mask array
    f_name = os.path.join(file_path, 'Oa01_radiance.nc')
    fp_h5 = h5py.File(f_name, 'r')
    data = fp_h5['Oa01_radiance']
    data = data[ul_y:lr_y, ul_x:lr_x]
    hdf_merge = os.path.join(file_path, 'reproj.hdf')
    file_sd_out = SD(hdf_merge, SDC.CREATE | SDC.WRITE)
    size = np.shape(data)
    objo = file_sd_out.create('Oa01', SDC.UINT16, size)
    objo.set(data)
    objo = file_sd_out.create('longitude', SDC.FLOAT32, size)
    objo.set(lon.astype(np.float32))
    objo = file_sd_out.create('latitude', SDC.FLOAT32, size)
    objo.set(lat.astype(np.float32))
    objo.endaccess()
    file_sd_out.end()
    os.system('cd %s && gdalwarp -t_srs EPSG:4326 HDF4_SDS:UNKNOWN:"%s":0 %s' %
              (file_path, 'reproj.hdf', 'reproj.tif'))
    raster_tmp = gdal.Open(os.path.join(file_path, 'reproj.tif'))
    geo_trans_dst = raster_tmp.GetGeoTransform()
    mask_array = raster_tmp.GetRasterBand(1).ReadAsArray()
    mask_array = np.logical_or(mask_array == 65535, mask_array == 0)
    raster_tmp = None
    os.remove(hdf_merge)
    os.remove(os.path.join(file_path, 'reproj.tif'))

    for i_band in range(nbands):
        Oa_index = 'Oa%02d' % (i_band + 1)
        print(Oa_index)
        if (Oa_index in band_need) or ('all' in band_need):
            f_name = os.path.join(file_path, '%s_radiance.nc' % Oa_index)
            fp_h5 = h5py.File(f_name, 'r')
            data = fp_h5['%s_radiance' % Oa_index]
            data = data[ul_y:lr_y, ul_x:lr_x].astype(float)
            offset = fp_h5['%s_radiance' % Oa_index].attrs['add_offset']
            scale = fp_h5['%s_radiance' % Oa_index].attrs['scale_factor']
            # Radiometric calibration
            Lr = data * scale + offset
            # Atmospheric correction
            mtl_coef = {
                'altitude': altitude,
                'visibility': visibility,
                'aero_type': aerotype,
                'location': center_lonlat,
                'month': month,
                'day': day,
                'solz': solz,
                'sola': sola,
                'salz': salz,
                'sala': sala
            }
            atms_corr = arms_corr(Lr, mtl_coef, i_band)
            # Resampling
            hdf_merge = os.path.join(file_path, 'reproj.hdf')
            file_sd_out = SD(hdf_merge, SDC.CREATE | SDC.WRITE)
            size = np.shape(atms_corr)
            objo = file_sd_out.create(Oa_index, SDC.FLOAT32, size)
            objo.set(atms_corr.astype(np.float32))
            objo = file_sd_out.create('longitude', SDC.FLOAT32, size)
            objo.set(lon.astype(np.float32))
            objo = file_sd_out.create('latitude', SDC.FLOAT32, size)
            objo.set(lat.astype(np.float32))
            objo.endaccess()
            file_sd_out.end()
            os.system(
                'cd %s && gdalwarp -t_srs EPSG:4326 HDF4_SDS:UNKNOWN:"%s":0 %s'
                % (file_path, 'reproj.hdf', 'reproj.tif'))
            raster_tmp = gdal.Open(os.path.join(file_path, 'reproj.tif'))
            geo_trans_dst = raster_tmp.GetGeoTransform()
            if rrs_join is None:
                rrs_join = np.zeros(
                    [raster_tmp.RasterYSize, raster_tmp.RasterXSize, nbands])
            rrs_join[:, :, i_band] = raster_tmp.GetRasterBand(1).ReadAsArray()
            raster_tmp = None
            os.remove(hdf_merge)
            os.remove(os.path.join(file_path, 'reproj.tif'))
    driver = gdal.GetDriverByName('GTiff')
    name_short = os.path.split(ifile)[1]
    name_short_split = name_short.split('_')
    for item in name_short_split:
        if len(item) == 15:
            date_str = item.replace('T', '')
            date_str = str(int(date_str) + 80000)  # convert to Beijing time (UTC+8)
            break
    if name_short_split[0] == 'S3A':
        satellite_code = 'A'
    elif name_short_split[0] == 'S3B':
        satellite_code = 'B'
    else:
        satellite_code = ''
    nrow = name_short_split[-8]
    ncolm = name_short_split[-7]
    name_out = 'Sentinel3%s_OLCI_300_L2_%s_%s_%s.tif' % (satellite_code,
                                                         date_str, nrow, ncolm)
    raster_fn_out = os.path.join(path_out, name_out)
    target_ds = driver.Create(raster_fn_out,
                              np.shape(rrs_join)[1],
                              np.shape(rrs_join)[0], nbands, gdal.GDT_UInt16)
    target_ds.SetGeoTransform(geo_trans_dst)
    raster_srs = osr.SpatialReference()
    raster_srs.ImportFromEPSG(4326)
    proj_ref = raster_srs.ExportToWkt()
    target_ds.SetProjection(proj_ref)
    for i in range(nbands):
        data_tmp = rrs_join[:, :, i]
        data_tmp = (data_tmp * 10000).astype(np.int32)  # np.int was removed from NumPy
        data_tmp[mask_array] = 65530
        target_ds.GetRasterBand(i + 1).WriteArray(data_tmp)
        band = target_ds.GetRasterBand(i + 1)
        band.SetNoDataValue(65530)
    target_ds = None
    # Remove the extracted files
    fp_h5.close()
    shutil.rmtree(file_path)