Example #1
    def set_file_attr(self):
        """
        set self.file_attr as a dict
        """
        data = dict()
        if self.resolution == 40000:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:

                try:
                    fp = coda.open(self.in_file)
                    product_class = coda.get_product_class(fp)
                    product_type = coda.get_product_type(fp)
                    product_version = coda.get_product_version(fp)
                    product_format = coda.get_product_format(fp)
                    product_size = coda.get_product_file_size(fp)
                    coda.close(fp)
                except Exception as e:
                    print str(e)
                    return

                data['class'] = product_class
                data['size'] = product_size
                data['type'] = product_type
                data['version'] = product_version
                data['format'] = product_format
                self.file_attr = data
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))
Example #2
    def set_file_attr(self):
        """
        set self.file_attr as a dict
        """
        data = dict()
        if self.resolution == 24000:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:

                try:
                    fp = coda.open(self.in_file)
                    product_class = coda.get_product_class(fp)
                    product_type = coda.get_product_type(fp)
                    product_version = coda.get_product_version(fp)
                    product_format = coda.get_product_format(fp)
                    product_size = coda.get_product_file_size(fp)
                    coda.close(fp)
                except Exception as e:
                    print str(e)
                    return

                data['class'] = product_class
                data['size'] = product_size
                data['type'] = product_type
                data['version'] = product_version
                data['format'] = product_format
                self.file_attr = data
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))

        elif self.resolution == 24001:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:

                try:
                    # 'NETCDF4'
                    ncr = Dataset(self.in_file, 'r', format='NETCDF3_CLASSIC')
                    data = ncr.ncattrs()
                    ncr.close()

                except Exception as e:
                    print str(e)
                    return

                self.file_attr = data
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))
        else:
            raise ValueError("Cant handle this resolution: ".format(
                self.resolution))
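These reader methods belong to a larger class whose module header is not shown; a minimal sketch of the imports they appear to rely on (an assumption, not part of the original excerpt) would be:

# Assumed imports for the reader methods above (not from the original excerpt):
import coda
import numpy as np
from netCDF4 import Dataset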
Example #3
    def get_sensor_azimuth(self):
        """
        return sensor_azimuth
        """
        if self.resolution == 40000:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:
                try:
                    fp = coda.open(self.in_file)
                    angle = coda.fetch(fp, 'MDR', -1, 'Earthshine',
                                       'GEO_EARTH', 'SAT_AZIMUTH')
                    coda.close(fp)
                    # angle = 30*3*32 = 1440; take 30*1*32 = 960 (the first 959 values)
                    data_size = self.data_shape[0]
                    data_row = angle.shape[0]
                    data_col = angle[0].shape[1]
                    data_pre = np.full(self.data_shape, -999.)

                    for i in xrange(data_size):
                        row, col = np.unravel_index(i, (data_row, data_col))
                        #                         print row, col, angle[row][1][col]
                        data_pre[i] = angle[row][1][col]

                    # filter out invalid values
                    invalid_index = np.logical_or(data_pre < 0, data_pre > 360)
                    data_pre = data_pre.astype(np.float32)
                    data_pre[invalid_index] = np.nan

                    data = data_pre

                except Exception as e:
                    print 'Open file error {}'.format(e)
                    return
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))

        else:
            raise ValueError(
                "Can't read this data, please check its resolution: {}".format(
                    self.in_file))
        return data
Example #4
def get_footprint(product):
    try:
        import coda
    except ImportError:
        return None
    path = "/METADATA/EOP_METADATA/om_featureOfInterest/eop_multiExtentOf/gml_surfaceMembers/gml_exterior@gml_posList"
    pf = coda.open(product)
    try:
        coord = coda.fetch(pf, path).split(' ')
    except coda.CodacError:
        return None
    finally:
        coda.close(pf)
    if len(coord) % 2 != 0:
        return None
    return Polygon([
        LinearRing([
            Point(float(lon), float(lat))
            for lat, lon in zip(coord[0::2], coord[1::2])
        ])
    ])
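A hypothetical usage sketch of get_footprint; the product path is a placeholder, and Polygon, LinearRing and Point are assumed to come from a geometry library such as shapely, which the excerpt does not show:

# Hypothetical usage (placeholder path; geometry classes assumed to come from shapely):
footprint = get_footprint('path/to/product.nc')
if footprint is not None:
    print(footprint)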
Example #5
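The excerpt below starts mid-script; a minimal preamble it assumes could look like this (the file name is a placeholder, not from the original):

# Assumed preamble for the excerpt below (not part of the original):
import coda
from numpy import vstack, hstack

product = coda.open('path/to/aeolus_l2b_product.DBL')  # placeholder path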
altitude = coda.fetch(product, 'geolocation', -1, 'measurement_geolocation', -1, 'mie_geolocation', -1, 'altitude_of_height_bin')
altitude = vstack(hstack(altitude))

wind_velocity = coda.fetch(product, 'wind_velocity', -1, 'measurement_wind_profile', -1, 'mie_altitude_bin_wind_info', -1, 'wind_velocity')
wind_velocity = vstack(hstack(wind_velocity))

print(wind_velocity.shape)
print(wind_velocity)

# Rayleigh measurement wind profiles

print("Rayleigh measurement wind profiles")

latitude = coda.fetch(product, 'geolocation', -1, 'measurement_geolocation', -1, 'rayleigh_geolocation', -1, 'latitude_of_height_bin')
latitude = vstack(hstack(latitude))

longitude = coda.fetch(product, 'geolocation', -1, 'measurement_geolocation', -1, 'rayleigh_geolocation', -1, 'longitude_of_height_bin')
longitude = vstack(hstack(longitude))

altitude = coda.fetch(product, 'geolocation', -1, 'measurement_geolocation', -1, 'rayleigh_geolocation', -1, 'altitude_of_height_bin')
altitude = vstack(hstack(altitude))

wind_velocity = coda.fetch(product, 'wind_velocity', -1, 'measurement_wind_profile', -1, 'rayleigh_altitude_bin_wind_info', -1, 'wind_velocity')
wind_velocity = vstack(hstack(wind_velocity))

print(wind_velocity.shape)
print(wind_velocity)

coda.close(product)
Example #6
    cursor = coda.Cursor()
    coda.cursor_set_product(cursor, pf)

    print "  MPH :"
    coda.cursor_goto_record_field_by_name(cursor, "mph")
    print_record(cursor)
    coda.cursor_goto_parent(cursor)

    print "  SPH :";
    coda.cursor_goto_record_field_by_name(cursor, "sph")
    print_record(cursor);
    coda.cursor_goto_parent(cursor)

    coda.cursor_goto_record_field_by_name(cursor, "dsd")
    num_dsd = coda.cursor_get_num_elements(cursor)
    if num_dsd > 0:
        coda.cursor_goto_first_array_element(cursor)
        for i in range(num_dsd):
            print "  DSD(%d) :" % i
            print_record(cursor)

            if i < num_dsd - 1:
                coda.cursor_goto_next_array_element(cursor)

        coda.cursor_goto_parent(cursor)

    coda.cursor_goto_parent(cursor)

    coda.close(pf)
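The excerpt above relies on a product handle pf and a helper print_record that are not shown. A minimal sketch of both follows, under the assumption that coda.fetch also accepts a cursor (worth checking against the CODA Python documentation):

# Minimal sketch of the missing pieces (assumptions, not from the original):
import coda

def print_record(cursor):
    # assumes coda.fetch can read whatever the cursor currently points at
    print coda.fetch(cursor)

pf = coda.open('path/to/envisat_product.N1')  # placeholder path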
Example #7
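This excerpt also starts mid-script; a sketch of the preamble it assumes is shown below. The file name, the layer count, and the use of coda.get_size to obtain the record count are assumptions, not from the original:

# Assumed preamble for the excerpt below (not part of the original):
import numpy
import coda

product = coda.open('path/to/aux_met_product.DBL')  # placeholder path
num_records = coda.get_size(product, 'met_off_nadir')[0]  # assumed record count lookup
num_layers = 137  # assumed; depends on the product definition
amd_pnom = numpy.empty([num_records, num_layers])
amd_znom = numpy.empty([num_records, num_layers])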
amd_t = numpy.empty([num_records, num_layers])
amd_u = numpy.empty([num_records, num_layers])

cursor = coda.Cursor()
coda.cursor_set_product(cursor, product)
coda.cursor_goto(cursor, '/met_off_nadir[0]')
for i in range(num_records):
    coda.cursor_goto(cursor, 'profile_data[0]')
    for j in range(num_layers - 1):
        coda.cursor_goto(cursor, 'amd_pnom')
        amd_pnom[i, j] = coda.cursor_read_double(cursor)
        coda.cursor_goto(cursor, '../amd_znom')
        amd_znom[i, j] = coda.cursor_read_double(cursor)
        coda.cursor_goto(cursor, '../amd_t')
        amd_t[i, j] = coda.cursor_read_double(cursor)
        coda.cursor_goto(cursor, '../amd_u')
        amd_u[i, j] = coda.cursor_read_double(cursor)
        coda.cursor_goto_parent(cursor)
        coda.cursor_goto_next_array_element(cursor)
    coda.cursor_goto_parent(cursor)
    coda.cursor_goto_parent(cursor)
    if i < num_records - 1:
        coda.cursor_goto_next_array_element(cursor)
del cursor
coda.close(product)

print(amd_pnom)
print(amd_znom)
print(amd_t)
print(amd_u)
Example #8
class CLASS_IASI_L1():
    def __init__(self, BandLst):

        # physical quantities stored as dicts
        self.Tbb = {}
        self.Rad = {}

        # 2-D arrays
        self.Lons = []
        self.Lats = []
        self.Time = []

        self.satAzimuth = []
        self.satZenith = []
        self.sunAzimuth = []
        self.sunZenith = []

        # spectral information
        self.wavenumber = []
        self.radiance = []

    def Load(self, L1File):

        print 'Reading all LEO data info......'
        if not os.path.isfile(L1File):
            print 'Error: %s not found' % L1File
            sys.exit(1)

        try:
            fp = coda.open(L1File)
        except Exception as e:
            print 'Open file error<%s> .' % (e)
            return

        try:
            # EPS = EUMETSAT Polar System atmospheric products (GOME-2 and IASI)
            # read the file header information
            product_class = coda.get_product_class(fp)
            product_type = coda.get_product_type(fp)
            product_version = coda.get_product_version(fp)
            product_format = coda.get_product_format(fp)
            product_size = coda.get_product_file_size(fp)
            print 'product_class ', product_class
            print 'product_type', product_type
            print 'product_version', product_version
            print 'product_format', product_format
            print 'product_size', product_size
            record = beatl2.ingest(L1File)
            print record

            SAT_angle = coda.fetch(fp, 'MDR', -1, 'MDR', 'GGeoSondAnglesMETOP')
            SUN_angle = coda.fetch(fp, 'MDR', -1, 'MDR', 'GGeoSondAnglesSUN')
            all_sun_angle = []
            all_sat_angle = []

            for i in xrange(len(SAT_angle)):
                tmp_sat = SAT_angle[i].reshape(-1)
                tmp_sun = SUN_angle[i].reshape(-1)
                if len(all_sat_angle) == 0:
                    all_sat_angle = tmp_sat
                    all_sun_angle = tmp_sun
                else:
                    all_sat_angle = np.concatenate((all_sat_angle, tmp_sat), 0)
                    all_sun_angle = np.concatenate((all_sun_angle, tmp_sun), 0)

            iasiLen = len(record.longitude)
            self.satZenith = (all_sat_angle[0::2]).reshape(iasiLen, 1)
            self.satAzimuth = (all_sat_angle[1::2]).reshape(iasiLen, 1)
            self.sunZenith = (all_sun_angle[0::2]).reshape(iasiLen, 1)
            self.sunAzimuth = (all_sun_angle[1::2]).reshape(iasiLen, 1)

            self.Lons = (record.longitude).reshape(iasiLen, 1)
            self.Lats = (record.latitude).reshape(iasiLen, 1)

            self.radiance = record.spectral_radiance * 10**7

            # for now, take the spectral wavenumbers of a single observation
            self.wavenumber = record.wavenumber[0, :]

            v_ymd2seconds = np.vectorize(metop_ymd2seconds)
            T1 = v_ymd2seconds(record.time)
            self.Time = T1.reshape(iasiLen, 1)

        except Exception as e:
            print str(e)
            sys.exit(1)
        finally:
            coda.close(fp)
Example #9
        if not product_class.startswith("ENVISAT") or product_type != "MIP_NL__2P":
            print >>sys.stderr, "Error: file %s is not a MIPAS Level 2 product (product class = %s, product type = %s)" % (sys.argv[1], product_class, product_type)
            sys.exit(1)

        
        print "Processing : %s" % f

        cursor = coda.Cursor()
        coda.cursor_set_product(cursor, pf)
        coda.cursor_goto_record_field_by_name(cursor, "scan_geolocation_ads")
       
        num_dsr = coda.cursor_get_num_elements(cursor)
        if num_dsr > 0:
            index = 0

            coda.cursor_goto_first_array_element(cursor)
            while index < num_dsr:
                coda.cursor_goto_record_field_by_name(cursor, "loc_mid")
                coda.cursor_goto_record_field_by_name(cursor, "latitude")
                latitude = coda.cursor_read_double(cursor)
                coda.cursor_goto_next_record_field(cursor)
                longitude = coda.cursor_read_double(cursor)
                print "latitude : %-8.4f  longitude : %-8.4f" % (latitude, longitude)
                coda.cursor_goto_parent(cursor)
                coda.cursor_goto_parent(cursor)
                index += 1
                if index < num_dsr:
                    coda.cursor_goto_next_array_element(cursor)

        coda.close(pf)
Example #10
        Lon.append(lon)
        Rad.append(rad_arr)  # radiance at TOA
        Spec.append(rad_lam)  # wavelengths of the radiance
        Irr.append(irr_arr)  # irradiance at TOA
        Alb_toa.append(alb_toa_arr)  # albedo at TOA
        Alb_sur.append(alb_sur_arr)  # albedo at surface
        Time.append(time_s)  # time in sec since 1970-01-01
        Alt.append(alt)  # altitude in meters
        Cloud_frac.append(cf)  # cloud fraction
        Sol_zen.append(sol_zen)
        Sol_azi.append(sol_azi)
        Sat_zen.append(sat_zen)
        Sat_azi.append(sat_azi)

# closes file to free memory
coda.close(ef)

# plots the DASF values
'''
plt.scatter(Lon, Lat, c=DASF, s=50, cmap=mpl.cm.gray)
plt.show()
'''

# saves the data into a dictionary on disk
disk_dict = dict(DASF=DASF, QA_dasf=QA_dasf, Lat=Lat, Lon=Lon, Rad=Rad,\
    Spec=Spec, Irr=Irr, Alb_toa=Alb_toa, Alb_sur=Alb_sur, Time=Time,\
    Alt=Alt, Cloud_frac=Cloud_frac, Sol_zen=Sol_zen, Sol_azi=Sol_azi,\
    Sat_zen=Sat_zen, Sat_azi=Sat_azi)
d_fn = os.path.join(path_dasf, base_fn + '.p')
pf = open(d_fn, 'wb')
pickle.dump(disk_dict, pf)
pf.close()
Example #11
    def get_solar_zenith(self):
        """
        return solar_zenith
        """
        if self.resolution == 24000:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:

                try:
                    fp = coda.open(self.in_file)
                    angle = coda.fetch(fp, 'MDR', -1, 'MDR',
                                       'GGeoSondAnglesSUN')
                    coda.close(fp)

                    all_angle = []

                    for i in xrange(len(angle)):
                        tmp_angle = angle[i].reshape(-1)
                        if len(all_angle) == 0:
                            all_angle = tmp_angle
                        else:
                            all_angle = np.concatenate((all_angle, tmp_angle))

                    s0 = self.data_shape[0]
                    # take every other value starting from index 0 (even positions)
                    data_pre = (all_angle[0::2]).reshape(s0, 1)

                    # filter out invalid values
                    invalid_index = np.logical_or(data_pre < 0, data_pre > 180)
                    data_pre = data_pre.astype(np.float32)
                    data_pre[invalid_index] = np.nan

                    data = data_pre

                except Exception as e:
                    print 'Open file error {}'.format(e)
                    return
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))
        elif self.resolution == 24001:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:

                try:
                    ncr = Dataset(self.in_file, 'r', format='NETCDF3_CLASSIC')
                    data_pre = ncr.variables['solar_zenith_angle'][:]
                    ncr.close()

                    # filter out invalid values
                    invalid_index = np.logical_or(data_pre < 0, data_pre > 180)
                    data_pre = data_pre.astype(np.float32)
                    data_pre[invalid_index] = np.nan

                    data = data_pre.reshape(self.data_shape)

                except Exception as e:
                    print 'Open file error {}'.format(e)
                    return
            else:
                raise ValueError("Can't read this satellite's data: {}".format(
                    self.satellite))
        else:
            raise ValueError(
                "Can't read this data, please check its resolution: {}".format(
                    self.in_file))
        return data
Example #12
    def read(self,
             vars_to_retrieve=None,
             files=[],
             first_file=None,
             last_file=None,
             file_pattern=None,
             list_coda_paths=False,
             local_temp_dir=None,
             return_as='numpy',
             apply_quality_flag=0.0):
        """Method that reads list of files as instance of :class:`UngriddedData`

        Parameters
        ----------
        vars_to_retrieve : :obj:`list` or similar, optional,
            list containing variable IDs that are supposed to be read. If None,
            all variables in :attr:`PROVIDES_VARIABLES` are loaded
        files : :obj:`list`, optional
            list of files to be read. If None, then the file list is used that
            is returned on :func:`get_file_list`.
        first_file : :obj:`int`, optional
            index of first file in file list to read. If None, the very first
            file in the list is used. Note: is ignored if input parameter
            `file_pattern` is specified.
        last_file : :obj:`int`, optional
            index of last file in list to read. If None, the very last file
            in the list is used. Note: is ignored if input parameter
            `file_pattern` is specified.
        file_pattern : str, optional
            string pattern for file search (cf :func:`get_file_list`)
        local_temp_dir : str, optional
            directory into which archive members are temporarily extracted

        Returns
        -------
        UngriddedData
            data object

        Example:
        >>> import pyaerocom as pya
        >>> obj = pya.io.read_aeolus_l2a_data.ReadL2Data()
        >>> testfiles = []
        >>> testfiles.append('/lustre/storeB/project/fou/kl/admaeolus/data.rev.2A02/download/2018-12/01/AE_OPER_ALD_U_N_2A_20181201T033526026_005423993_001590_0001.TGZ')
        >>> data=obj.read(files=testfiles)
        >>> data=obj.read(files=testfiles, vars_to_retrieve='ec355aer')

        """

        import pathlib
        import tarfile
        import os
        import coda

        if local_temp_dir is None:
            local_temp_dir = self.LOCAL_TMP_DIR

        if vars_to_retrieve is None:
            vars_to_retrieve = self.DEFAULT_VARS
        elif isinstance(vars_to_retrieve, str):
            vars_to_retrieve = [vars_to_retrieve]

        if files is None:
            if len(self.files) == 0:
                self.get_file_list(pattern=file_pattern)
            files = self.files

        if file_pattern is None:
            if first_file is None:
                first_file = 0
            if last_file is None:
                last_file = len(files)

            files = files[first_file:last_file]

        self.read_failed = []
        temp_files = {}

        data_obj = UngriddedData(num_points=self._CHUNKSIZE)
        meta_key = 0.0
        idx = 0

        # check if the supplied file is a supported archive file (tar in this case)
        # and extract the files with supported suffixes to const.CACHEDIR
        non_archive_files = []
        for idx, _file in enumerate(sorted(files)):
            # temp = 'reading file: {}'.format(_file)

            self.logger.info('file: {}'.format(_file))
            suffix = pathlib.Path(_file).suffix
            if suffix in self.SUPPORTED_ARCHIVE_SUFFIXES:
                temp = 'opening archive file; using {} as temp dir.'.format(
                    local_temp_dir)
                self.logger.info(temp)
                # untar archive files first
                tarhandle = tarfile.open(_file)
                files_in_tar = tarhandle.getnames()
                for file_in_tar in files_in_tar:
                    if pathlib.Path(
                            file_in_tar).suffix in self.SUPPORTED_SUFFIXES:
                        # extract file to tmp path
                        member = tarhandle.getmember(file_in_tar)
                        temp = 'extracting file {}...'.format(member.name)
                        self.logger.info(temp)
                        tarhandle.extract(member,
                                          path=local_temp_dir,
                                          set_attrs=False)
                        extract_file = os.path.join(local_temp_dir,
                                                    member.name)
                        non_archive_files.append(extract_file)
                        temp_files[extract_file] = True
                tarhandle.close()
            else:
                non_archive_files.append(_file)

        for idx, _file in enumerate(sorted(non_archive_files)):
            # list coda data paths in the 1st file in case the user asked for that
            if idx == 0 and list_coda_paths:
                pass
                coda_handle = coda.open(_file)
                root_field_names = coda.get_field_names(coda_handle)
                for field in root_field_names:
                    print(field)
                coda.close(coda_handle)
                data_obj = None
                return data_obj

            file_data = self.read_file(_file,
                                       vars_to_retrieve=vars_to_retrieve,
                                       loglevel=logging.INFO,
                                       return_as=return_as)
            if return_as == 'numpy':
                self.logger.info('{} points read'.format(file_data.shape[0]))
                # the metadata dict is left empty for L2 data
                # the location in the data set is time step dependant!
                if idx == 0:
                    data_obj._data = file_data

                else:
                    data_obj._data = np.append(data_obj._data,
                                               file_data,
                                               axis=0)

                data_obj._idx = data_obj._data.shape[0] + 1
                file_data = None
                # remove file if it was temporary one
                if _file in temp_files:
                    os.remove(_file)
                #     pass
                # tmp_obj = UngriddedData()
                # tmp_obj._data = file_data
                # tmp_obj._idx = data_obj._data.shape[0] + 1
                # data_obj.append(tmp_obj)
                self.logger.info(
                    'size of data object: {}'.format(data_obj._idx - 1))
            elif return_as == 'dict':
                if idx == 0:
                    data_obj._data = {}
                    shape_store = {}
                    index_store = {}
                    file_start_index_arr = [0]
                    # apply quality flags
                    if apply_quality_flag > 0.:
                        qflags = file_data[self._QANAME]
                        keep_indexes = np.where(qflags >= apply_quality_flag)[0]
                        elements_to_add = keep_indexes.size
                    else:
                        keep_indexes = np.arange(0,
                                                 len(file_data[self._QANAME]))
                        elements_to_add = file_data[self._QANAME].shape[0]

                    for _key in file_data:
                        # print('key: {}'.format(_key))
                        shape_store[_key] = file_data[_key].shape
                        index_store[_key] = file_data[_key].shape[0]
                        input_shape = list(file_data[_key].shape)
                        input_shape[0] = self._ROWNO
                        data_obj._data[_key] = np.empty(input_shape,
                                                        dtype=np.float_)
                        if len(input_shape) == 1:
                            data_obj._data[_key][0:file_data[_key].
                                                 shape[0]] = file_data[_key]
                        elif len(input_shape) == 2:
                            data_obj._data[_key][0:file_data[_key].
                                                 shape[0], :] = file_data[_key]
                        elif len(input_shape) == 3:
                            data_obj._data[_key][
                                0:file_data[_key].
                                shape[0], :, :] = file_data[_key]
                        elif len(input_shape) == 4:
                            data_obj._data[_key][
                                0:file_data[_key].
                                shape[0], :, :, :] = file_data[_key]
                        else:
                            pass

                # 2nd + file
                else:
                    if apply_quality_flag > 0.:
                        qflags = file_data[self._QANAME]
                        keep_indexes = np.where(qflags >= apply_quality_flag)[0]
                        elements_to_add = keep_indexes.size

                    file_start_index_arr.append(
                        file_data[self.TSSIZENAME].shape[0])
                    for _key in file_data:
                        if _key in self.STATICFIELDNAMES:
                            print('key: {}'.format(_key))
                            continue
                        # shape_store[_key] = file_data[_key].shape
                        elements_to_add = file_data[_key].shape[0]
                        # extend data_obj._data[_key] if necessary
                        if index_store[_key] + elements_to_add > data_obj._data[
                                _key].shape[0]:
                            current_shape = list(data_obj._data[_key].shape)
                            current_shape[
                                0] = current_shape[0] + self._CHUNKSIZE
                            tmp_data = np.empty(current_shape, dtype=np.float_)
                            if len(current_shape) == 1:
                                tmp_data[0:data_obj._data[_key].
                                         shape[0]] = data_obj._data[_key]
                            elif len(current_shape) == 2:
                                tmp_data[0:data_obj._data[_key].
                                         shape[0], :] = data_obj._data[_key]
                            elif len(current_shape) == 3:
                                tmp_data[0:data_obj._data[_key].
                                         shape[0], :, :] = data_obj._data[_key]
                            elif len(current_shape) == 4:
                                tmp_data[
                                    0:data_obj._data[_key].
                                    shape[0], :, :, :] = data_obj._data[_key]
                            else:
                                pass

                        input_shape = list(file_data[_key].shape)
                        if len(input_shape) == 1:
                            data_obj._data[_key][
                                index_store[_key]:index_store[_key] +
                                file_data[_key].shape[0]] = file_data[_key]
                        elif len(input_shape) == 2:
                            data_obj._data[_key][
                                index_store[_key]:index_store[_key] +
                                file_data[_key].shape[0], :] = file_data[_key]
                        elif len(input_shape) == 3:
                            data_obj._data[_key][
                                index_store[_key]:index_store[_key] +
                                file_data[_key].
                                shape[0], :, :] = file_data[_key]
                        elif len(input_shape) == 4:
                            data_obj._data[_key][
                                index_store[_key]:index_store[_key] +
                                file_data[_key].
                                shape[0], :, :, :] = file_data[_key]
                        else:
                            pass
                        index_store[_key] += elements_to_add

                file_data = None
                # remove file if it was temporary one
                if _file in temp_files:
                    os.remove(_file)
            else:
                pass

        # now shorten the data dict to the necessary size
        if return_as == 'dict':
            for _key in data_obj._data:
                data_obj._data[_key] = data_obj._data[_key][:index_store[_key]]
            data_obj._data['file_indexes'] = file_start_index_arr

            # apply the quality flags
            if apply_quality_flag > 0.:
                pass

        return data_obj
Example #13
# The decorator and import below are assumptions added here: the yield-inside-
# try/finally pattern suggests this helper is meant to be a context manager.
from contextlib import contextmanager

@contextmanager
def coda_open(filename):
    coda_handle = coda.open(filename)
    try:
        yield coda_handle
    finally:
        coda.close(coda_handle)
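A hypothetical usage of the helper above (the path is a placeholder, not from the original):

# Hypothetical usage (placeholder path):
with coda_open('path/to/product.DBL') as handle:
    print(coda.get_field_names(handle))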
Example #14
    def get_spectral_response(self):
        """
        return spectral wavenumbers and response values, 1-D and 2-D
        """
        k = 1.98644746103858e-9
        if self.resolution == 40000:
            satellite_type1 = ['METOP-A', 'METOP-B']
            if self.satellite in satellite_type1:
                try:
                    fp = coda.open(self.in_file)
                    wave3 = coda.fetch(fp, 'MDR', -1, 'Earthshine',
                                       'WAVELENGTH_3')
                    wave4 = coda.fetch(fp, 'MDR', -1, 'Earthshine',
                                       'WAVELENGTH_4')
                    lambda_smr = coda.fetch(fp, 'VIADR_SMR', -1, 'LAMBDA_SMR')
                    smr = coda.fetch(fp, 'VIADR_SMR', -1, 'SMR')

                    sunz = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH',
                                      'SOLAR_ZENITH')
                    coda.close(fp)

                    # GOME 959x4096: take the radiance of the last 2048 points
                    wavelens = self.record.wavelength[0, 2048:]
                    response = self.record.spectral_radiance[:, 2048:]

                    data_size = self.data_shape[0]
                    data_row = sunz.shape[0]
                    data_col = sunz[0].shape[1]
                    # GOME radiance calculation; wave3 and wave4 are 30*1024

                    for m in xrange(data_size):
                        row, col = np.unravel_index(m, (data_row, data_col))
                        for i in xrange(2048):
                            if i < 1024:
                                response[m,
                                         i] = response[m,
                                                       i] * k / wave3[row][i]
                            else:
                                response[m, i] = response[
                                    m, i] * k / wave4[row][i - 1024]

                    # compute the solar radiance
                    sol_wavelens = np.zeros((2048, ))  # wavelengths of the solar radiance
                    sol_response = np.zeros((2048, ))  # solar radiance
                    for i in xrange(2048):
                        if i < 1024:
                            sol_response[i] = (smr[0][2][i] *
                                               k) / lambda_smr[0][2][i]
                            sol_wavelens[i] = lambda_smr[0][2][i]
                        else:
                            sol_response[i] = (smr[0][3][i - 1024] *
                                               k) / lambda_smr[0][3][i - 1024]
                            sol_wavelens[i] = lambda_smr[0][3][i - 1024]

                    idx = np.where(response < 0)
                    print len(idx[0])
                    if len(idx[0]) > 0:
                        response[idx] = 0.
                    gome_wavelens, gome_response, solar_response = self.combine_gome_band34(
                        wavelens, response, sol_wavelens, sol_response)
                    s0, s1 = gome_response.shape
                    gome_response = gome_response.reshape(s0, 1, s1)
                    print gome_response.shape, solar_response.shape
                except Exception as e:
                    print 'Open file error {}'.format(e)
                    return
            self.solar_response = solar_response
        return gome_wavelens, gome_response
Example #15
class CLASS_GOME_L1():

    def __init__(self, BandLst):

        self.k = 1.98644746103858e-9

        # physical quantities stored as dicts
        self.Ref = {}

        # 2-D arrays
        self.Lons = []
        self.Lats = []
        self.Time = []

        self.satAzimuth = []
        self.satZenith = []
        self.sunAzimuth = []
        self.sunZenith = []

        # spectral information
        self.wavenumber = []
        self.radiance = []

    def Load(self, L1File):

        print 'Reading all LEO data info......'
        if not os.path.isfile(L1File):
            print 'Error: %s not found' % L1File
            sys.exit(1)

        try:
            fp = coda.open(L1File)
        except Exception as e:
            print 'Open file error<%s> .' % (e)
            return

        try:
            # EPS = EUMETSAT Polar System atmospheric products (GOME-2 and IASI)
            # read the file header information
            product_class = coda.get_product_class(fp)
            product_type = coda.get_product_type(fp)
            product_version = coda.get_product_version(fp)
            product_format = coda.get_product_format(fp)
            product_size = coda.get_product_file_size(fp)
            print 'product_class ', product_class
            print 'product_type', product_type
            print 'product_version', product_version
            print 'product_format', product_format
            print 'product_size', product_size
            record = beatl2.ingest(L1File)

            WAVE_3 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'WAVELENGTH_3')
            WAVE_4 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'WAVELENGTH_4')

            LAMBDA_SMR = coda.fetch(fp, 'VIADR_SMR', -1, 'LAMBDA_SMR')
            SMR = coda.fetch(fp, 'VIADR_SMR', -1, 'SMR')

            print 'gome data is:'
            print record

            self.rec = record
            SUN_Z = coda.fetch(
                fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'SOLAR_ZENITH')
            SUN_A = coda.fetch(
                fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'SOLAR_AZIMUTH')
            SAT_Z = coda.fetch(
                fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'SAT_ZENITH')
            SAT_A = coda.fetch(
                fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'SAT_AZIMUTH')

            print 'solar angle array shapes', SUN_Z.shape, SUN_Z[0].shape

            # total number of observations
            dataLen = record.latitude.size
            dataCol = SUN_Z[0].shape[1]
            dataRow = SUN_Z.shape[0]

            # allocate arrays for the angles
            self.satZenith = np.full((dataLen, 1), -999.)
            self.satAzimuth = np.full((dataLen, 1), -999.)
            self.sunZenith = np.full((dataLen, 1), -999.)
            self.sunAzimuth = np.full((dataLen, 1), -999.)

            # fill in the values
            for i in xrange(dataLen):
                row, col = np.unravel_index(i, (dataRow, dataCol))
                self.satAzimuth[i] = SAT_A[row][1][col]
                self.satZenith[i] = SAT_Z[row][1][col]
                self.sunAzimuth[i] = SUN_A[row][1][col]
                self.sunZenith[i] = SUN_Z[row][1][col]

            self.Lons = (record.longitude).reshape(dataLen, 1)
            self.Lats = (record.latitude).reshape(dataLen, 1)

            # compute the GOME radiance
            self.radiance = record.spectral_radiance[:, 2048:]
            for m in xrange(dataLen):
                row, col = np.unravel_index(m, (dataRow, dataCol))
                for i in xrange(2048):
                    if i < 1024:
                        self.radiance[m, i] = self.radiance[
                            m, i] * self.k / WAVE_3[row][i]
                    else:
                        self.radiance[m, i] = self.radiance[
                            m, i] * self.k / WAVE_4[row][i - 1024]

            # compute the solar radiance
            self.vec_Solar_L = np.zeros((2048,))  # solar radiance
            self.vec_Solar_WL = np.zeros((2048,))  # wavelengths of the solar radiance
            for i in xrange(2048):
                if i < 1024:
                    self.vec_Solar_L[i] = (
                        SMR[0][2][i] * self.k) / LAMBDA_SMR[0][2][i]
                    self.vec_Solar_WL[i] = LAMBDA_SMR[0][2][i]
                else:
                    self.vec_Solar_L[i] = (
                        SMR[0][3][i - 1024] * self.k) / LAMBDA_SMR[0][3][i - 1024]
                    self.vec_Solar_WL[i] = LAMBDA_SMR[0][3][i - 1024]

            print 'GOME observation count: %d' % dataLen
            print 'min/max wavelength of the solar radiance'
            print np.min(self.vec_Solar_WL), self.vec_Solar_WL[0:3]
            print np.max(self.vec_Solar_WL)
            # for now, take the spectral wavenumbers of a single observation
            self.wavenumber = record.wavelength[0, 2048:]
            print 'min/max wavelength of the GOME radiance, first observation'
            print np.min(self.wavenumber), self.wavenumber[0:3]
            print np.max(self.wavenumber)
            self.wavenumber = record.wavelength[9, 2048:]
            print 'min/max wavelength of the GOME radiance, tenth observation'
            print np.min(self.wavenumber), self.wavenumber[0:3]
            print np.max(self.wavenumber)

            v_ymd2seconds = np.vectorize(metop_ymd2seconds)
            T1 = v_ymd2seconds(record.time)
            self.Time = T1.reshape(dataLen, 1)
#             print time.gmtime(self.Time[0, 0])

        except Exception as e:
            print str(e)
            sys.exit(1)
        finally:
            coda.close(fp)
Example #16
    def read(self, vars_to_retrieve=None, files=[], first_file=None,
             last_file=None, file_pattern=None, list_coda_paths=False,
             local_temp_dir=None):
        """Method that reads list of files as instance of :class:`UngriddedData`

        Parameters
        ----------
        vars_to_retrieve : :obj:`list` or similar, optional,
            list containing variable IDs that are supposed to be read. If None,
            all variables in :attr:`PROVIDES_VARIABLES` are loaded
        files : :obj:`list`, optional
            list of files to be read. If None, then the file list is used that
            is returned on :func:`get_file_list`.
        first_file : :obj:`int`, optional
            index of first file in file list to read. If None, the very first
            file in the list is used. Note: is ignored if input parameter
            `file_pattern` is specified.
        last_file : :obj:`int`, optional
            index of last file in list to read. If None, the very last file
            in the list is used. Note: is ignored if input parameter
            `file_pattern` is specified.
        file_pattern : str, optional
            string pattern for file search (cf :func:`get_file_list`)
        local_temp_dir : str, optional
            directory into which archive members are temporarily extracted

        Returns
        -------
        UngriddedData
            data object

        Example:
        >>> import pyaerocom as pya
        >>> obj = pya.io.read_aeolus_l2a_data.ReadL2Data()
        >>> testfiles = []
        >>> testfiles.append('/lustre/storeB/project/fou/kl/admaeolus/data.rev.2A02/download/2018-12/01/AE_OPER_ALD_U_N_2A_20181201T033526026_005423993_001590_0001.TGZ')
        >>> data=obj.read(files=testfiles)
        >>> data=obj.read(files=testfiles, vars_to_retrieve='ec355aer')

        """

        import pathlib
        import tarfile
        import os
        import coda

        if local_temp_dir is None:
            local_temp_dir = self.LOCAL_TMP_DIR

        if vars_to_retrieve is None:
            vars_to_retrieve = self.DEFAULT_VARS
        elif isinstance(vars_to_retrieve, str):
            vars_to_retrieve = [vars_to_retrieve]

        if files is None:
            if len(self.files) == 0:
                self.get_file_list(pattern=file_pattern)
            files = self.files

        if file_pattern is None:
            if first_file is None:
                first_file = 0
            if last_file is None:
                last_file = len(files)

            files = files[first_file:last_file]

        self.read_failed = []
        temp_files = {}

        data_obj = UngriddedData(num_points=self._COLNO, chunksize=self._CHUNKSIZE)
        meta_key = 0.0
        idx = 0

        # check if the supplied file is a supported archive file (tar in this case)
        # and extract the files with supported suffixes to const._cachedir
        non_archive_files = []
        for idx, _file in enumerate(sorted(files)):
            # temp = 'reading file: {}'.format(_file)

            self.logger.info('file: {}'.format(_file))
            suffix = pathlib.Path(_file).suffix
            if suffix in self.SUPPORTED_ARCHIVE_SUFFIXES:
                temp = 'opening archive file; using {} as temp dir.'.format(local_temp_dir)
                self.logger.info(temp)
                # untar archive files first
                tarhandle = tarfile.open(_file)
                files_in_tar = tarhandle.getnames()
                for file_in_tar in files_in_tar:
                    if pathlib.Path(file_in_tar).suffix in self.SUPPORTED_SUFFIXES:
                        # extract file to tmp path
                        member = tarhandle.getmember(file_in_tar)
                        temp = 'extracting file {}...'.format(member.name)
                        self.logger.info(temp)
                        tarhandle.extract(member, path=local_temp_dir, set_attrs=False)
                        extract_file = os.path.join(local_temp_dir, member.name)
                        non_archive_files.append(extract_file)
                        temp_files[extract_file] = True
                tarhandle.close()
            else:
                non_archive_files.append(_file)

        for idx, _file in enumerate(sorted(non_archive_files)):
            # list coda data paths in the 1st file in case the user asked for that
            if idx == 0 and list_coda_paths:
                pass
                coda_handle = coda.open(_file)
                root_field_names = coda.get_field_names(coda_handle)
                for field in root_field_names:
                    print(field)
                coda.close(coda_handle)
                data_obj = None
                return data_obj

            file_data = self.read_file(_file, vars_to_retrieve=vars_to_retrieve,
                                       loglevel=logging.INFO, return_as='numpy')
            self.logger.info('{} points read'.format(file_data.shape[0]))
            # the metadata dict is left empty for L2 data
            # the location in the data set is time step dependant!
            if idx == 0:
                data_obj._data = file_data

            else:
                data_obj._data = np.append(data_obj._data, file_data, axis=0)

            data_obj._idx = data_obj._data.shape[0] + 1
            file_data = None
            # remove file if it was temporary one
            if _file in temp_files:
                os.remove(_file)
            #     pass
            # tmp_obj = UngriddedData()
            # tmp_obj._data = file_data
            # tmp_obj._idx = data_obj._data.shape[0] + 1
            # data_obj.append(tmp_obj)

        self.logger.info('size of data object: {}'.format(data_obj._idx - 1))
        return data_obj
Example #17
class GOME_COMM():
    def __init__(self):

        self.k = 1.98644746103858e-9

        # centre latitude/longitude
        self.centre_lat = np.zeros((30, 24))
        self.centre_lon = np.zeros((30, 24))
        self.centre_row = np.zeros((30, 24))
        self.centre_col = np.zeros((30, 24))
        # corner latitude/longitude
        self.corner_lat = np.zeros((30, 24, 4))
        self.corner_lon = np.zeros((30, 24, 4))
        self.corner_row = np.zeros((30, 24, 4))
        self.corner_col = np.zeros((30, 24, 4))

        self.sun_Z = np.zeros((30, 24))  # solar zenith angle
        self.sun_A = np.zeros((30, 24))  # solar azimuth angle
        self.sat_Z = np.zeros((30, 24))  # satellite zenith angle
        self.sat_A = np.zeros((30, 24))  # satellite azimuth angle

        self.band3 = np.zeros((30, 24, 1024))  # radiance values
        self.band4 = np.zeros((30, 24, 1024))
        self.band3_ERR_RAD = np.zeros((30, 24, 1024))
        self.band3_STOKES_FRACTION = np.zeros((30, 24, 1024))
        self.band4_ERR_RAD = np.zeros((30, 24, 1024))
        self.band4_STOKES_FRACTION = np.zeros((30, 24, 1024))
        self.wave3 = np.zeros((30, 1024))  # wavelengths
        self.wave4 = np.zeros((30, 1024))

        #         self.LAMBDA_SMR3 = np.zeros((1, 1024))
        #         self.LAMBDA_SMR4 = np.zeros((1, 1024))
        #         self.SMR3 = np.zeros((1, 1024))
        #         self.SMR4 = np.zeros((1, 1024))
        #         self.E_SMR3 = np.zeros((1, 1024))
        #         self.E_SMR4 = np.zeros((1, 1024))
        #         self.E_REL_SUN3 = np.zeros((1, 1024))
        #         self.E_REL_SUN4 = np.zeros((1, 1024))

        self.E_SMR3 = np.zeros((1024))
        self.E_SMR4 = np.zeros((1024))
        self.E_REL_SUN3 = np.zeros((1024))
        self.E_REL_SUN4 = np.zeros((1024))

        self.arr_GOME_L = np.zeros((2048, 30, 24))  # radiance
        self.arr_GOME_WL = np.zeros((2048, 30, 24))  # wavelengths of the radiance

        self.vec_Solar_L = np.zeros((2, 1024))  # solar radiance
        self.vec_Solar_WL = np.zeros((2, 1024))  # wavelengths of the solar radiance

        # the following variables are computed by convolution
        self.vec_Radiance_Solar = np.zeros(15)
        self.vec_Radiance = np.zeros((15, 30, 24))
        self.arr_Ref = np.zeros((15, 30, 24))

    def init_gome(self, in_proj_cfg, des_sensor):
        '''
        Read the YAML-format configuration file
        '''
        if not os.path.isfile(in_proj_cfg):
            print 'Not Found %s' % in_proj_cfg
            sys.exit(-1)

        with open(in_proj_cfg, 'r') as stream:
            cfg = yaml.safe_load(stream)

        self.sat1 = cfg['INFO']['sat']
        self.sensor1 = cfg['INFO']['sensor']
        self.sensor2 = des_sensor
        self.ymd = cfg['INFO']['ymd']

        self.ifile = cfg['PATH']['ipath']
        self.ofile = cfg['PATH']['opath']

        self.cmd = cfg['PROJ']['cmd']
        self.col = cfg['PROJ']['col']
        self.row = cfg['PROJ']['row']
        self.res = cfg['PROJ']['res']

    def read_gome(self, infile):
        # open the GOME file
        try:
            fp = coda.open(infile)
        except Exception as e:
            print 'Open file error<%s> .' % (e)
            return
        # read the file header information
        product_class = coda.get_product_class(fp)
        product_type = coda.get_product_type(fp)
        product_version = coda.get_product_version(fp)
        product_format = coda.get_product_format(fp)
        product_size = coda.get_product_file_size(fp)
        # EPS = EUMETSAT Polar System atmospheric products (GOME-2 and IASI)
        print 'product_class ', product_class
        print 'product_type', product_type
        print 'product_version', product_version
        print 'product_format', product_format
        print 'product_size', product_size

        CENTRE = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'CENTRE')
        CORNER = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH', 'CORNER')
        SUN_Z = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH',
                           'SOLAR_ZENITH')
        SUN_A = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH',
                           'SOLAR_AZIMUTH')
        SAT_Z = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH',
                           'SAT_ZENITH')
        SAT_A = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'GEO_EARTH',
                           'SAT_AZIMUTH')
        BAND_3 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'BAND_3')
        BAND_4 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'BAND_4')
        WAVE_3 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'WAVELENGTH_3')
        WAVE_4 = coda.fetch(fp, 'MDR', -1, 'Earthshine', 'WAVELENGTH_4')
        LAMBDA_SMR = coda.fetch(fp, 'VIADR_SMR', -1, 'LAMBDA_SMR')
        SMR = coda.fetch(fp, 'VIADR_SMR', -1, 'SMR')
        E_SMR = coda.fetch(fp, 'VIADR_SMR', -1, 'E_SMR')
        E_REL_SUN = coda.fetch(fp, 'VIADR_SMR', -1, 'E_REL_SUN')

        #         fp.close()

        #         print 'CENTRE', CENTRE.shape, CENTRE[0].shape
        #         print 'CORNER', CORNER.shape, CORNER[0].shape
        #         print 'SUN_Z', SUN_Z.shape, SUN_Z[0].shape
        #         print 'SUN_A', SUN_A.shape, SUN_A[0].shape
        #         print 'SAT_Z', SAT_Z.shape, SAT_Z[0].shape
        #         print 'SAT_A', SAT_A.shape, SAT_A[0].shape
        #         print 'BAND_3', BAND_3.shape, BAND_3[0].shape
        #         print 'BAND_4', BAND_4.shape, BAND_4[0].shape
        #         print 'WAVE_3', WAVE_3.shape, WAVE_3[0].shape
        #         print 'WAVE_4', WAVE_4.shape, WAVE_4[0].shape
        #         print 'LAMBDA_SMR', LAMBDA_SMR.shape, LAMBDA_SMR[0].shape
        #         print 'SMR', SMR.shape, SMR[0].shape
        #         print 'E_SMR', E_SMR.shape, E_SMR[0].shape
        #         print 'E_REL_SUN', E_REL_SUN.shape, E_REL_SUN[0].shape
        #         print LAMBDA_SMR[0][0][1023]
        #         print BAND_3[0][23][1023].RAD
        #         print self.band3.shape

        for i in range(30):
            count = coda.get_size(fp, 'MDR', i, 'Earthshine', 'BAND_3')
            for j in range(int(count[0] * 0.75)):
                for m in range(1024):

                    self.band3[i][j][m] = BAND_3[i][j][m].RAD
                    self.band3_ERR_RAD[i][j][m] = BAND_3[i][j][m].ERR_RAD
                    self.band3_STOKES_FRACTION[i][j][m] = BAND_3[i][j][
                        m].STOKES_FRACTION
                    self.band4[i][j][m] = BAND_4[i][j][m].RAD
                    self.band4_ERR_RAD[i][j][m] = BAND_4[i][j][m].ERR_RAD
                    self.band4_STOKES_FRACTION[i][j][m] = BAND_4[i][j][
                        m].STOKES_FRACTION

        for m in range(2048):
            for i in range(30):
                count = coda.get_size(fp, 'MDR', i, 'Earthshine', 'BAND_3')
                for j in range(int(count[0] * 0.75)):
                    if m < 1024:
                        if BAND_3[i][j][m].RAD < 0:
                            BAND_3[i][j][m].RAD = 0
                        self.arr_GOME_L[m][i][j] = (BAND_3[i][j][m].RAD *
                                                    self.k) / WAVE_3[i][m]
                        self.arr_GOME_WL[m][i][j] = WAVE_3[i][m]

                    else:
                        if BAND_4[i][j][m - 1024].RAD < 0:
                            BAND_4[i][j][m - 1024].RAD = 0
                        self.arr_GOME_L[m][i][j] = (
                            BAND_4[i][j][m - 1024].RAD *
                            self.k) / WAVE_4[i][m - 1024]
                        self.arr_GOME_WL[m][i][j] = WAVE_4[i][m - 1024]

        for i in range(2):
            for j in range(1024):
                if i == 0:
                    self.vec_Solar_L[i][j] = (
                        SMR[0][2][j] * self.k) / LAMBDA_SMR[0][2][j]  # solar radiance
                    self.vec_Solar_WL[i][j] = LAMBDA_SMR[0][2][j]
                    self.E_SMR3[j] = E_SMR[0][2][j]
                    self.E_REL_SUN3[j] = E_REL_SUN[0][2][j]
                elif i == 1:
                    self.vec_Solar_L[i][j] = (
                        SMR[0][3][j] * self.k) / LAMBDA_SMR[0][3][j]  # solar radiance
                    self.vec_Solar_WL[i][j] = LAMBDA_SMR[0][3][j]
                    self.E_SMR4[j] = E_SMR[0][3][j]
                    self.E_REL_SUN4[j] = E_REL_SUN[0][3][j]

        for i in range(30):
            for j in range(24):
                self.sun_A[i][j] = SUN_A[i][1][j]
                self.sun_Z[i][j] = SUN_Z[i][1][j]
                self.sat_A[i][j] = SAT_A[i][1][j]
                self.sat_Z[i][j] = SAT_Z[i][1][j]
                self.centre_lat[i][j] = CENTRE[i][j].latitude
                self.centre_lon[i][j] = CENTRE[i][j].longitude

        for i in range(30):
            for j in range(24):
                for m in range(4):
                    self.corner_lat[i][j][m] = CORNER[i][m][j].latitude
                    self.corner_lon[i][j][m] = CORNER[i][m][j].longitude
        coda.close(fp)
Example #18
            # Apply bad pixel map
            for chan_num in range(0,8):
                for pixelNr in range(0,1024):
                    if (badpx[chan_num][pixelNr]):
                        pixel_array[chan_num][pixelNr] = NO_VALUE

            coda.cursor_goto_next_array_element(mds_cursor[mds_type])

        if (stateNr < num_states - 1):
            coda.cursor_goto_next_array_element(states_cursor)


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print >>sys.stderr, "Usage: %s <sciamachy level 1b product>" % sys.argv[0]
        sys.exit(1)
        
    coda.set_option_perform_boundary_checks(0)

    pf = coda.open(sys.argv[1])
    product_class = coda.get_product_class(pf)
    product_type = coda.get_product_type(pf)
    if not product_class.startswith("ENVISAT") or product_type != "SCI_NL__1P":
        print >>sys.stderr, "Error: file %s is not a SCIAMACHY Level 1b product (product class = %s, product type = %s)" % (sys.argv[1], product_class, product_type)
        sys.exit(1)

    init(pf)
    process_states(pf)

    coda.close(pf)
Example #19
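The script below omits its imports; a minimal set it appears to rely on (an assumption based on the names used) would be:

# Assumed imports for the script below (not part of the original):
import argparse
import os
import re

import coda
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d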
def main():
    parser = argparse.ArgumentParser(
        'Read star reference spectra from a folder,'
        ' calculate mean, std, and quantiles (0.05, 0.50, 0.95),'
        ' save into a file in numpy format')
    parser.add_argument('--path', help="input folder")
    parser.add_argument('--out', help="output file")
    parser.add_argument(
        '--use-measures',
        action="store_true",
        help="use measurement data instead of the reference star spectrum")
    parser.add_argument('--plot-all',
                        action="store_true",
                        help="plot all spectra and also the result")
    parser.add_argument('--plot', action="store_true", help="plot the result")
    args = parser.parse_args()

    spectra = []
    for file in os.listdir(args.path):
        if file[-3:] != '.N1':
            continue

        #   - see https://github.com/stcorp/codadef-documentation for product_class, product_type, version
        #   - to list field names: coda.get_field_names(h,""): [
        #       'mph', 'sph', 'dsd', 'tra_summary_quality', 'tra_occultation_data',
        #       'tra_nom_wav_assignment', 'tra_ref_star_spectrum', 'tra_ref_atm_dens_profile',
        #       'tra_transmission', 'tra_satu_and_sfa_data', 'tra_auxiliary_data',
        #       'tra_geolocation'
        #     ]
        #   - meanings of the fields: http://envisat.esa.int/handbooks/gomos/CNTR2-2-3.html

        # file, product_class, product_type, version
        h = coda.open_as(os.path.join(args.path, file), 'ENVISAT_GOMOS',
                         'GOM_TRA_1P', 1)

        v = coda.fetch(h, 'sph')
        m = re.match(r'\d*(.*)', v[12].strip())
        bayer = m[1] if m else v[12].strip()

        v = coda.fetch(h, 'tra_ref_star_spectrum')
        spe_ref_raw = np.array(v[0][1]).reshape(
            (1, -1))  # in electrons/pixel/0.5s

        if args.use_measures:
            # the actual measurement data:
            n = 10
            v = coda.fetch(h, 'tra_transmission')
            spe_ref_raw = np.stack([np.array(r[2])
                                    for r in v], axis=0)[:n, :] * spe_ref_raw

        v = coda.fetch(h, 'tra_nom_wav_assignment')
        lam = np.array(v[0][0])  # in nm

        v = coda.fetch(h, 'tra_occultation_data')
        sens_len = v[0][10]  # how many elements the curve has
        sens_lam = np.array(v[0][11])[0:sens_len]  # in nm
        sens_val = np.array(v[0][12])[
            0:sens_len]  # in (photons/s/cm2/nm) / (electrons/pixel/0.5s)

        coda.close(h)

        # hack to alleviate a problem arising from interpolation and a sharp fall in sensitivity around 389nm
        dl = np.array([-0.3, 0, 0.3])
        sens_lam = np.concatenate(
            (sens_lam[:12], sens_lam[12:13] + dl, sens_lam[13:]))
        sens_val = np.concatenate(
            (sens_val[:12], sens_val[11:14], sens_val[13:]))

        interp = interp1d(sens_lam, sens_val, kind='linear')
        spe_ref = spe_ref_raw * interp(lam).reshape((1, -1))

        spectra.append(spe_ref)

        if args.plot_all:
            plt.plot(
                lam.reshape((1, -1)).repeat(len(spe_ref), axis=0), spe_ref)
            # plt.plot(lam, spe_ref_raw)
            # plt.plot(sens_lam, sens_val * 0.8*np.max(spe_ref)/np.max(sens_val), 'x-')
            plt.title(file)
                plt.xlabel('Wavelength [nm]')
            plt.ylabel('Photon flux density [ph/s/cm2/nm]')
            plt.show()

    if len(spectra) == 0:
        raise Exception('no spectra found in folder %s' % args.path)

    spectra = np.concatenate(spectra, axis=0)
    quantiles = np.quantile(spectra, (0.05, 0.50, 0.95), axis=0)
    mean = np.mean(spectra, axis=0)
    std = np.std(spectra, axis=0)

    outfile = args.out.replace('{{bayer}}', bayer.lower().replace(' ', '_'))
    if outfile[-4:] != '.npz':
        outfile = outfile + '.npz'
    np.savez(outfile,
             bayer=bayer,
             lam=lam,
             mean=mean,
             std=std,
             q05=quantiles[0, :],
             q50=quantiles[1, :],
             q95=quantiles[2, :])

    if args.plot or args.plot_all:
        plt.plot(lam, quantiles[1, :], 'C0-')
        plt.plot(lam, quantiles[0, :], 'C1:')
        plt.plot(lam, quantiles[2, :], 'C1:')
        plt.title('Median spectrum of star %s' % (bayer, ))
        plt.xlabel('Wavelength [nm]')
        plt.ylabel('Photon flux density [ph/s/cm2/nm]')
        plt.show()