Example #1
    def add_variables(dataset, width, height):
        tu.add_geolocation_variables(dataset,
                                     width,
                                     height,
                                     chunksizes=CHUNKING)
        tu.add_quality_flags(dataset, width, height, chunksizes=CHUNKING)

        default_array = DefaultData.create_default_vector(
            height, np.uint32, fill_value=4294967295)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, 4294967295)
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["time"] = variable

        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["coordinates"] = "longitude latitude"
        dataset["aot"] = variable

        dataset["u_independent_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to independent effects")
        dataset["u_structured_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to structured effects")
        dataset["u_common_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to common effects")
Example #2
    def add_quality_flags(dataset, height):
        tu.add_quality_flags(dataset, SWATH_WIDTH, height, chunksizes=CHUNKING_2D)

        default_array = DefaultData.create_default_array(SWATH_WIDTH, height, np.uint16, fill_value=0)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["flag_masks"] = "1, 2, 4, 8, 16"
        variable.attrs["flag_meanings"] = "suspect_mirror suspect_geo suspect_time outlier_nos uncertainty_too_large"
        variable.attrs["standard_name"] = "status_flag"
        tu.add_chunking(variable, CHUNKING_2D)
        tu.add_geolocation_attribute(variable)
        dataset["data_quality_bitmask"] = variable
Example #3
    def test_add_quality_flags(self):
        ds = xr.Dataset()
        TemplateUtil.add_quality_flags(ds, 9, 11)

        quality = ds.variables["quality_pixel_bitmask"]
        self.assertEqual((11, 9), quality.shape)
        self.assertEqual(0, quality.data[5, 5])
        self.assertEqual(np.uint8, quality.dtype)
        self.assertEqual("status_flag", quality.attrs["standard_name"])
        self.assertEqual("1, 2, 4, 8, 16, 32, 64, 128",
                         quality.attrs["flag_masks"])
        self.assertEqual(
            "invalid use_with_caution invalid_input invalid_geoloc invalid_time sensor_error padded_data incomplete_channel_data",
            quality.attrs["flag_meanings"])
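
The `flag_masks`/`flag_meanings` pair asserted here follows the usual CF convention for bit flags: each mask is a single bit, and a pixel value is decoded by AND-ing it with every mask. A small decoding sketch (illustration only, not taken from the test suite):

    import numpy as np

    flag_masks = [int(m) for m in "1, 2, 4, 8, 16, 32, 64, 128".split(",")]
    flag_meanings = ("invalid use_with_caution invalid_input invalid_geoloc "
                     "invalid_time sensor_error padded_data incomplete_channel_data").split()

    pixel = np.uint8(33)  # hypothetical value with bits 1 and 32 set
    raised = [name for mask, name in zip(flag_masks, flag_meanings) if pixel & mask]
    print(raised)  # ['invalid', 'sensor_error']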
Example #4
    def add_variables(dataset, width, height, num_samples=10):
        tu.add_geolocation_variables(dataset, width, height, chunksizes=CHUNKING)
        tu.add_quality_flags(dataset, width, height, chunksizes=CHUNKING)

        default_array = DefaultData.create_default_vector(height, np.uint32, fill_value=4294967295)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, 4294967295)
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["time"] = variable

        default_array = DefaultData.create_default_array_3d(width, height, num_samples, np.float32, fill_value=np.NaN)
        variable = Variable(["samples","y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["standard_name"] = "sea_surface_temperature"
        variable.attrs["units"] = "K"
        variable.attrs["coordinates"] = "longitude latitude"
        dataset["sst"] = variable
Example #5
    def add_variables(dataset, width, height):
        # @todo 1 tb/tb add geolocation 2018-06-25

        tu.add_quality_flags(dataset, width, height, chunksizes=CHUNKING)

        default_array = DefaultData.create_default_vector(height,
                                                          np.int32,
                                                          fill_value=-1)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, -1)
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["time"] = variable

        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["standard_name"] = "surface_albedo"
        variable.attrs["coordinates"] = "longitude latitude"
        tu.add_chunking(variable, CHUNKING)
        dataset["surface_albedo"] = variable

        dataset["u_independent_surface_albedo"] = tu.create_CDR_uncertainty(
            width, height,
            "Uncertainty of surface_albedo due to independent effects")
        dataset["u_structured_surface_albedo"] = tu.create_CDR_uncertainty(
            width, height,
            "Uncertainty of surface_albedo due to structured effects")
        dataset["u_common_surface_albedo"] = tu.create_CDR_uncertainty(
            width, height,
            "Uncertainty of surface_albedo due to common effects")
Example #6
    def add_original_variables(dataset, height, srf_size=None):
        # height is ignored - supplied just for interface compatibility tb 2017-02-05

        tu.add_quality_flags(dataset, FULL_SIZE, FULL_SIZE, chunksizes=CHUNKSIZES)

        # time
        default_array = DefaultData.create_default_array(IR_SIZE, IR_SIZE, np.uint32)
        variable = Variable([IR_Y_DIMENSION, IR_X_DIMENSION], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.uint32))
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time of pixel"
        tu.add_units(variable, "seconds since 1970-01-01 00:00:00")
        tu.add_offset(variable, TIME_FILL_VALUE)
        tu.add_chunking(variable, CHUNKSIZES)
        dataset["time"] = variable

        dataset["solar_azimuth_angle"] = MVIRI._create_angle_variable_int(0.005493164, standard_name="solar_azimuth_angle", unsigned=True)
        dataset["solar_zenith_angle"] = MVIRI._create_angle_variable_int(0.005493248, standard_name="solar_zenith_angle")
        dataset["satellite_azimuth_angle"] = MVIRI._create_angle_variable_int(0.01, standard_name="sensor_azimuth_angle", long_name="sensor_azimuth_angle", unsigned=True)
        dataset["satellite_zenith_angle"] = MVIRI._create_angle_variable_int(0.01, standard_name="platform_zenith_angle", unsigned=True)

        # count_ir
        default_array = DefaultData.create_default_array(IR_SIZE, IR_SIZE, np.uint8)
        variable = Variable([IR_Y_DIMENSION, IR_X_DIMENSION], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.uint8))
        variable.attrs["long_name"] = "Infrared Image Counts"
        tu.add_units(variable, "count")
        tu.add_chunking(variable, CHUNKSIZES)
        dataset["count_ir"] = variable

        # count_wv
        default_array = DefaultData.create_default_array(IR_SIZE, IR_SIZE, np.uint8)
        variable = Variable([IR_Y_DIMENSION, IR_X_DIMENSION], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.uint8))
        variable.attrs["long_name"] = "WV Image Counts"
        tu.add_units(variable, "count")
        tu.add_chunking(variable, CHUNKSIZES)
        dataset["count_wv"] = variable

        default_array = DefaultData.create_default_array(FULL_SIZE, FULL_SIZE, np.uint8, fill_value=0)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["flag_masks"] = "1, 2, 4, 8, 16, 32"
        variable.attrs["flag_meanings"] = "uncertainty_suspicious uncertainty_too_large space_view_suspicious not_on_earth suspect_time suspect_geo"
        variable.attrs["standard_name"] = "status_flag"
        tu.add_chunking(variable, CHUNKSIZES)
        dataset["data_quality_bitmask"] = variable

        # distance_sun_earth
        dataset["distance_sun_earth"] = tu.create_scalar_float_variable(long_name="Sun-Earth distance", units="au")

        # solar_irradiance_vis
        dataset["solar_irradiance_vis"] = tu.create_scalar_float_variable(standard_name="solar_irradiance_vis", long_name="Solar effective Irradiance", units="W*m-2")

        # u_solar_irradiance_vis
        default_array = np.full([], np.NaN, np.float32)
        variable = Variable([], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "Uncertainty in Solar effective Irradiance"
        tu.add_units(variable, "Wm^-2")
        variable.attrs[corr.PIX_CORR_FORM] = corr.RECT_ABS
        variable.attrs[corr.PIX_CORR_UNIT] = corr.PIXEL
        variable.attrs[corr.PIX_CORR_SCALE] = [-np.inf, np.inf]
        variable.attrs[corr.SCAN_CORR_FORM] = corr.RECT_ABS
        variable.attrs[corr.SCAN_CORR_UNIT] = corr.LINE
        variable.attrs[corr.SCAN_CORR_SCALE] = [-np.inf, np.inf]
        variable.attrs[corr.IMG_CORR_FORM] = corr.RECT_ABS
        variable.attrs[corr.IMG_CORR_UNIT] = corr.DAYS
        variable.attrs[corr.IMG_CORR_SCALE] = [-np.inf, np.inf]
        variable.attrs["pdf_shape"] = "rectangle"
        dataset["u_solar_irradiance_vis"] = variable

        if srf_size is None:
            srf_size = SRF_SIZE

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function weights'
        variable.attrs["description"] = 'Per channel: weights for the relative spectral response function'
        tu.add_encoding(variable, np.int16, -32768, 0.000033)
        dataset['SRF_weights'] = variable

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function frequencies'
        variable.attrs["description"] = 'Per channel: frequencies for the relative spectral response function'
        tu.add_encoding(variable, np.int32, -2147483648, 0.0001)
        tu.add_units(variable, "nm")
        variable.attrs["source"] = "Filename of SRF"
        variable.attrs["Valid(YYYYDDD)"] = "datestring"
        dataset['SRF_frequencies'] = variable

        # srf covariance
        default_array = DefaultData.create_default_array(srf_size, srf_size, np.float32, fill_value=np.NaN)
        variable = Variable([SRF_VIS_DIMENSION, SRF_VIS_DIMENSION], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "Covariance of the Visible Band Spectral Response Function"
        tu.add_chunking(variable, CHUNKSIZES)
        dataset["covariance_spectral_response_function_vis"] = variable

        # u_srf_ir
        default_array = DefaultData.create_default_vector(srf_size, np.float32, fill_value=np.NaN)
        variable = Variable([SRF_IR_WV_DIMENSION], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "Uncertainty in Spectral Response Function for IR channel"
        dataset["u_spectral_response_function_ir"] = variable

        # u_srf_wv
        default_array = DefaultData.create_default_vector(srf_size, np.float32, fill_value=np.NaN)
        variable = Variable([SRF_IR_WV_DIMENSION], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "Uncertainty in Spectral Response Function for WV channel"
        dataset["u_spectral_response_function_wv"] = variable

        dataset["a_ir"] = tu.create_scalar_float_variable(long_name="Calibration parameter a for IR Band", units="mWm^-2sr^-1cm^-1")
        dataset["b_ir"] = tu.create_scalar_float_variable(long_name="Calibration parameter b for IR Band", units="mWm^-2sr^-1cm^-1/DC")
        dataset["u_a_ir"] = tu.create_scalar_float_variable(long_name="Uncertainty of calibration parameter a for IR Band", units="mWm^-2sr^-1cm^-1")
        dataset["u_b_ir"] = tu.create_scalar_float_variable(long_name="Uncertainty of calibration parameter b for IR Band", units="mWm^-2sr^-1cm^-1/DC")
        dataset["a_wv"] = tu.create_scalar_float_variable(long_name="Calibration parameter a for WV Band", units="mWm^-2sr^-1cm^-1")
        dataset["b_wv"] = tu.create_scalar_float_variable(long_name="Calibration parameter b for WV Band", units="mWm^-2sr^-1cm^-1/DC")
        dataset["u_a_wv"] = tu.create_scalar_float_variable(long_name="Uncertainty of calibration parameter a for WV Band", units="mWm^-2sr^-1cm^-1")
        dataset["u_b_wv"] = tu.create_scalar_float_variable(long_name="Uncertainty of calibration parameter b for WV Band", units="mWm^-2sr^-1cm^-1/DC")
        dataset["bt_a_ir"] = tu.create_scalar_float_variable(long_name="IR Band BT conversion parameter A", units="1")
        dataset["bt_b_ir"] = tu.create_scalar_float_variable(long_name="IR Band BT conversion parameter B", units="1")
        dataset["bt_a_wv"] = tu.create_scalar_float_variable(long_name="WV Band BT conversion parameter A", units="1")
        dataset["bt_b_wv"] = tu.create_scalar_float_variable(long_name="WV Band BT conversion parameter B", units="1")
        dataset["years_since_launch"] = tu.create_scalar_float_variable(long_name="Fractional year since launch of satellite", units="years")

        x_ir_wv_dim = dataset.dims["x_ir_wv"]
        dataset["x_ir_wv"] = Coordinate("x_ir_wv", np.arange(x_ir_wv_dim, dtype=np.uint16))

        y_ir_wv_dim = dataset.dims["y_ir_wv"]
        dataset["y_ir_wv"] = Coordinate("y_ir_wv", np.arange(y_ir_wv_dim, dtype=np.uint16))

        srf_size_dim = dataset.dims["srf_size"]
        dataset["srf_size"] = Coordinate("srf_size", np.arange(srf_size_dim, dtype=np.uint16))
Example #7
    def add_original_variables(dataset,
                               height,
                               srf_size=None,
                               corr_dx=None,
                               corr_dy=None,
                               lut_size=None):
        tu.add_geolocation_variables(dataset, SWATH_WIDTH, height)
        tu.add_quality_flags(dataset, SWATH_WIDTH, height)

        # btemps
        default_array = DefaultData.create_default_array_3d(
            SWATH_WIDTH, height, NUM_CHANNELS, np.float32, np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        variable.attrs["standard_name"] = "toa_brightness_temperature"
        tu.add_encoding(variable, np.int32, -999999, scale_factor=0.01)
        tu.add_units(variable, "K")
        variable.attrs["ancillary_variables"] = "chanqual qualind scanqual"
        dataset["btemps"] = variable

        # chanqual
        default_array = DefaultData.create_default_array(
            height,
            NUM_CHANNELS,
            np.int32,
            dims_names=["channel", "y"],
            fill_value=0)
        variable = Variable(["channel", "y"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        variable.attrs["flag_masks"] = "1, 2, 4, 8, 16, 32"
        variable.attrs["flag_meanings"] = "some_bad_prt_temps some_bad_space_view_counts some_bad_bb_counts no_good_prt_temps no_good_space_view_counts no_good_bb_counts"
        dataset["chanqual"] = variable

        # instrtemp
        default_array = DefaultData.create_default_vector(height,
                                                          np.float32,
                                                          fill_value=np.NaN)
        variable = Variable(["y"], default_array)
        tu.add_units(variable, "K")
        tu.add_encoding(variable,
                        np.int32,
                        DefaultData.get_default_fill_value(np.int32),
                        scale_factor=0.01)
        variable.attrs["long_name"] = "instrument_temperature"
        dataset["instrtemp"] = variable

        # qualind
        default_array = DefaultData.create_default_vector(height,
                                                          np.int32,
                                                          fill_value=0)
        variable = Variable(["y"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        variable.attrs["flag_masks"] = "33554432, 67108864, 134217728, 268435456, 536870912, 1073741824, 2147483648"
        variable.attrs["flag_meanings"] = "instr_status_changed first_good_clock_update no_earth_loc no_calib data_gap_precedes time_seq_error not_use_scan"
        dataset["qualind"] = variable

        # scanqual
        default_array = DefaultData.create_default_vector(height,
                                                          np.int32,
                                                          fill_value=0)
        variable = Variable(["y"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        variable.attrs["flag_masks"] = "8, 16, 32, 64, 128, 1024, 2048, 4096, 8192, 16384, 32768, 1048576, 2097152, 4194304, 8388608"
        variable.attrs["flag_meanings"] = "earth_loc_quest_ant_pos earth_loc_quest_reas earth_loc_quest_margin earth_loc_quest_time no_earth_loc_time uncalib_instr_mode uncalib_channels calib_marg_prt uncalib_bad_prt calib_few_scans uncalib_bad_time repeat_scan_times inconsistent_time time_field_bad time_field_inferred"
        dataset["scanqual"] = variable

        # scnlin
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["long_name"] = "scanline"
        dataset["scnlin"] = variable

        # scnlindy
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["long_name"] = "Acquisition day of year of scan"
        dataset["scnlindy"] = variable

        # scnlintime
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["long_name"] = "Acquisition time of scan in milliseconds since beginning of the day"
        tu.add_units(variable, "ms")
        dataset["scnlintime"] = variable

        # scnlinyr
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["long_name"] = "Acquisition year of scan"
        dataset["scnlinyr"] = variable

        # satellite_azimuth_angle
        variable = AMSUB_MHS.create_angle_variable(height,
                                                   "sensor_azimuth_angle")
        dataset["satellite_azimuth_angle"] = variable

        # satellite_zenith_angle
        variable = AMSUB_MHS.create_angle_variable(height,
                                                   "sensor_zenith_angle")
        dataset["satellite_zenith_angle"] = variable

        # solar_azimuth_angle
        variable = AMSUB_MHS.create_angle_variable(height,
                                                   "solar_azimuth_angle")
        dataset["solar_azimuth_angle"] = variable

        # solar_zenith_angle
        variable = AMSUB_MHS.create_angle_variable(height,
                                                   "solar_zenith_angle")
        dataset["solar_zenith_angle"] = variable

        # acquisition_time
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["acquisition_time"] = variable
Example #8
    def add_original_variables(dataset, height, srf_size=None):
        tu.add_geolocation_variables(dataset,
                                     SWATH_WIDTH,
                                     height,
                                     chunksizes=CHUNKS_2D)
        tu.add_quality_flags(dataset,
                             SWATH_WIDTH,
                             height,
                             chunksizes=CHUNKS_2D)

        # Time
        default_array = DefaultData.create_default_vector(height,
                                                          np.float64,
                                                          fill_value=np.NaN)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        tu.add_units(variable, "s")
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        dataset["Time"] = variable

        # relative_azimuth_angle
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "relative_azimuth_angle"
        tu.add_units(variable, "degree")
        tu.add_encoding(variable,
                        np.int16,
                        DefaultData.get_default_fill_value(np.int16),
                        0.01,
                        chunksizes=CHUNKS_2D)
        variable.attrs["valid_max"] = 18000
        variable.attrs["valid_min"] = -18000
        tu.add_geolocation_attribute(variable)
        dataset["relative_azimuth_angle"] = variable

        # satellite_zenith_angle
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "sensor_zenith_angle"
        tu.add_units(variable, "degree")
        tu.add_encoding(variable,
                        np.int16,
                        DefaultData.get_default_fill_value(np.int16),
                        0.01,
                        chunksizes=CHUNKS_2D)
        variable.attrs["valid_max"] = 9000
        variable.attrs["valid_min"] = 0
        tu.add_geolocation_attribute(variable)
        dataset["satellite_zenith_angle"] = variable

        # solar_zenith_angle
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "solar_zenith_angle"
        tu.add_units(variable, "degree")
        tu.add_encoding(variable,
                        np.int16,
                        DefaultData.get_default_fill_value(np.int16),
                        0.01,
                        chunksizes=CHUNKS_2D)
        variable.attrs["valid_max"] = 18000
        variable.attrs["valid_min"] = 0
        tu.add_geolocation_attribute(variable)
        dataset["solar_zenith_angle"] = variable

        dataset["Ch1"] = AVHRR._create_channel_refl_variable(
            height, "Channel 1 Reflectance")
        dataset["Ch2"] = AVHRR._create_channel_refl_variable(
            height, "Channel 2 Reflectance")
        dataset["Ch3a"] = AVHRR._create_channel_refl_variable(
            height, "Channel 3a Reflectance")
        dataset["Ch3b"] = AVHRR._create_channel_bt_variable(
            height, "Channel 3b Brightness Temperature")
        dataset["Ch4"] = AVHRR._create_channel_bt_variable(
            height, "Channel 4 Brightness Temperature")
        dataset["Ch5"] = AVHRR._create_channel_bt_variable(
            height, "Channel 5 Brightness Temperature")

        # data_quality_bitmask
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.uint8,
                                                         fill_value=0)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = 'status_flag'
        variable.attrs["long_name"] = 'bitmask for quality per pixel'
        variable.attrs["flag_masks"] = '1,2'
        variable.attrs['flag_meanings'] = 'bad_geolocation_timing_err bad_calibration_radiometer_err'
        tu.add_chunking(variable, CHUNKS_2D)
        tu.add_geolocation_attribute(variable)
        dataset['data_quality_bitmask'] = variable

        default_array = DefaultData.create_default_vector(height,
                                                          np.uint8,
                                                          fill_value=0)
        variable = Variable(["y"], default_array)
        variable.attrs["long_name"] = 'bitmask for quality per scanline'
        variable.attrs["standard_name"] = 'status_flag'
        variable.attrs["flag_masks"] = '1,2,4,8,16,32,64'
        variable.attrs['flag_meanings'] = 'do_not_use bad_time bad_navigation bad_calibration channel3a_present solar_contamination_failure solar_contamination'
        dataset['quality_scanline_bitmask'] = variable

        default_array = DefaultData.create_default_array(N_CHANS,
                                                         height,
                                                         np.uint8,
                                                         fill_value=0)
        variable = Variable(["y", "channel"], default_array)
        variable.attrs["long_name"] = 'bitmask for quality per channel'
        variable.attrs["standard_name"] = 'status_flag'
        variable.attrs["flag_masks"] = '1,2'
        variable.attrs['flag_meanings'] = 'bad_channel some_pixels_not_detected_2sigma'
        dataset['quality_channel_bitmask'] = variable

        if srf_size is None:
            srf_size = MAX_SRF_SIZE

        default_array = DefaultData.create_default_array(srf_size,
                                                         N_CHANS,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function weights'
        variable.attrs["description"] = 'Per channel: weights for the relative spectral response function'
        tu.add_encoding(variable, np.int16, -32768, 0.000033)
        dataset['SRF_weights'] = variable

        default_array = DefaultData.create_default_array(srf_size,
                                                         N_CHANS,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function wavelengths'
        variable.attrs["description"] = 'Per channel: wavelengths for the relative spectral response function'
        tu.add_encoding(variable, np.int32, -2147483648, 0.0001)
        tu.add_units(variable, "um")
        dataset['SRF_wavelengths'] = variable

        default_vector = DefaultData.create_default_vector(height,
                                                           np.uint8,
                                                           fill_value=255)
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable, 255)
        variable.attrs["long_name"] = 'Indicator of original file'
        variable.attrs["description"] = "Indicator for mapping each line to its corresponding original level 1b file. See global attribute 'source' for the filenames. 0 corresponds to 1st listed file, 1 to 2nd file."
        dataset["scanline_map_to_origl1bfile"] = variable

        default_vector = DefaultData.create_default_vector(
            height,
            np.int16,
            fill_value=DefaultData.get_default_fill_value(np.int16))
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int16))
        variable.attrs["long_name"] = 'Original_Scan_line_number'
        variable.attrs["description"] = 'Original scan line numbers from corresponding l1b records'
        dataset["scanline_origl1b"] = variable

        tu.add_coordinates(dataset,
                           ["Ch1", "Ch2", "Ch3a", "Ch3b", "Ch4", "Ch5"])
Example #9
File: uth.py Project: gerritholl/FCDRTools
    def add_variables(dataset, width, height):
        WriterUtils.add_gridded_global_attributes(dataset)

        tu.add_gridded_geolocation_variables(dataset, width, height)
        tu.add_quality_flags(dataset, width, height)

        dataset["time_ranges_ascend"] = UTH._create_time_ranges_variable(
            height, width,
            "Minimum and maximum seconds of day pixel contribution time, ascending nodes"
        )
        dataset["time_ranges_descend"] = UTH._create_time_ranges_variable(
            height, width,
            "Minimum and maximum seconds of day pixel contribution time, descending nodes"
        )

        dataset["observation_count_ascend"] = UTH._create_observation_counts_variable(
            height, width,
            "Number of UTH/brightness temperature observations in a grid box for ascending passes")
        dataset["observation_count_descend"] = UTH._create_observation_counts_variable(
            height, width,
            "Number of UTH/brightness temperature observations in a grid box for descending passes")

        dataset["overpass_count_ascend"] = UTH._create_overpass_counts_variable(
            height, width,
            "Number of satellite overpasses in a grid box for ascending passes"
        )
        dataset["overpass_count_descend"] = UTH._create_overpass_counts_variable(
            height, width,
            "Number of satellite overpasses in a grid box for descending passes"
        )

        dataset["uth_ascend"] = UTH._create_uth_variable(
            width,
            height,
            description=
            "Monthly average of all UTH retrievals in a grid box for ascending passes (calculated from daily averages)",
        )
        dataset["uth_descend"] = UTH._create_uth_variable(
            width,
            height,
            description=
            "Monthly average of all UTH retrievals in a grid box for descending passes (calculated from daily averages)"
        )

        dataset["u_independent_uth_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to independent effects for ascending passes",
            coordinates="lon lat")
        dataset["u_independent_uth_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to independent effects for descending passes",
            coordinates="lon lat")
        dataset["u_structured_uth_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to structured effects for ascending passes",
            coordinates="lon lat")
        dataset["u_structured_uth_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to structured effects for descending passes",
            coordinates="lon lat")
        dataset["u_common_uth_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to common effects for ascending passes",
            coordinates="lon lat")
        dataset["u_common_uth_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of UTH due to common effects for descending passes",
            coordinates="lon lat")

        dataset["uth_inhomogeneity_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Standard deviation of all daily UTH averages which were used to calculate the monthly UTH average in a grid box for ascending passes",
            coordinates="lon lat")
        dataset["uth_inhomogeneity_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Standard deviation of all daily UTH averages which were used to calculate the monthly UTH average in a grid box for descending passes",
            coordinates="lon lat")

        dataset["BT_ascend"] = UTH._create_bt_variable(
            width,
            height,
            description=
            "Monthly average of all brightness temperatures which were used to retrieve UTH in a grid box for ascending passes (calculated from daily averages)"
        )
        dataset["BT_descend"] = UTH._create_bt_variable(
            width,
            height,
            description=
            "Monthly average of all brightness temperatures which were used to retrieve UTH in a grid box for descending passes (calculated from daily averages)"
        )

        dataset["u_independent_BT_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to independent effects for ascending passes",
            coordinates="lon lat",
            units="K")
        dataset["u_independent_BT_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to independent effects for descending passes",
            coordinates="lon lat",
            units="K")

        dataset["u_structured_BT_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to structured effects for ascending passes",
            coordinates="lon lat",
            units="K")
        dataset["u_structured_BT_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to structured effects for descending passes",
            coordinates="lon lat",
            units="K")

        dataset["u_common_BT_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to common effects for ascending passes",
            coordinates="lon lat",
            units="K")
        dataset["u_common_BT_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Uncertainty of brightness temperature due to common effects for descending passes",
            coordinates="lon lat",
            units="K")

        dataset["BT_inhomogeneity_ascend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Standard deviation of all daily brightness temperature averages which were used to calculate the monthly brightness temperature average for ascending passes",
            coordinates="lon lat",
            units="K")
        dataset["BT_inhomogeneity_descend"] = tu.create_CDR_uncertainty(
            width,
            height,
            "Standard deviation of all daily brightness temperature averages which were used to calculate the monthly brightness temperature average for descending passes",
            coordinates="lon lat",
            units="K")

        dataset["observation_count_all_ascend"] = UTH._create_observation_counts_variable(
            height, width,
            "Number of all observations in a grid box for ascending passes - no filtering done")
        dataset["observation_count_all_descend"] = UTH._create_observation_counts_variable(
            height, width,
            "Number of all observations in a grid box for descending passes - no filtering done")
Example #10
    def add_original_variables(dataset, height, srf_size=None):
        tu.add_geolocation_variables(dataset, SWATH_WIDTH, height)
        tu.add_quality_flags(dataset, SWATH_WIDTH, height)

        # Temperature_misc_housekeeping
        default_array = DefaultData.create_default_array(
            height,
            NUM_THERMISTORS,
            np.float32,
            dims_names=["housekeeping", "y"],
            fill_value=np.NaN)
        variable = Variable(["housekeeping", "y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        variable.attrs["units"] = "TODO"
        dataset["Temperature_misc_housekeeping"] = variable

        # ancil_data
        default_array = DefaultData.create_default_array(
            height,
            ANCIL_VAL,
            np.float64,
            dims_names=["ancil_val", "y"],
            fill_value=np.NaN)
        variable = Variable(["ancil_val", "y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "Additional per scan information: year, day_of_year, secs_of_day, sat_lat, " \
                                      "sat_long, sat_alt, sat_heading, year, day_of_year, secs_of_day"
        dataset["ancil_data"] = variable

        # channel_quality_flag
        default_array = DefaultData.create_default_array_3d(
            SWATH_WIDTH, height, NUM_CHANNELS, np.float32, np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        dataset["channel_quality_flag"] = variable

        # cold_counts
        default_array = DefaultData.create_default_array_3d(
            SWATH_WIDTH, height, CALIB_NUMBER, np.float32, np.NaN)
        variable = Variable(["calib_number", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        dataset["cold_counts"] = variable

        # counts_to_tb_gain
        default_array = DefaultData.create_default_array(
            height,
            NUM_CHANNELS,
            np.float32,
            dims_names=["channel", "y"],
            fill_value=np.NaN)
        variable = Variable(["channel", "y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        dataset["counts_to_tb_gain"] = variable

        # counts_to_tb_offset
        default_array = DefaultData.create_default_array(
            height,
            NUM_CHANNELS,
            np.float32,
            dims_names=["channel", "y"],
            fill_value=np.NaN)
        variable = Variable(["channel", "y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        dataset["counts_to_tb_offset"] = variable

        # gain_control
        default_array = DefaultData.create_default_array(
            height,
            NUM_CHANNELS,
            np.float32,
            dims_names=["channel", "y"],
            fill_value=np.NaN)
        variable = Variable(["channel", "y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        dataset["gain_control"] = variable

        # tb
        default_array = DefaultData.create_default_array_3d(
            SWATH_WIDTH, height, NUM_CHANNELS, np.float32, np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        variable.attrs["standard_name"] = "toa_brightness_temperature"
        tu.add_units(variable, "K")
        dataset["tb"] = variable

        # thermal_reference
        default_array = DefaultData.create_default_vector(
            height, np.float32, np.NaN)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        tu.add_units(variable, "TODO")
        dataset["thermal_reference"] = variable

        # warm_counts
        default_array = DefaultData.create_default_array_3d(
            SWATH_WIDTH, height, CALIB_NUMBER, np.float32, np.NaN)
        variable = Variable(["calib_number", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "TODO"
        dataset["warm_counts"] = variable