Example 1
    def _create_counts_uncertainty_vector_uint32(height, standard_name):
        default_array = DefaultData.create_default_vector(height, np.float32)
        variable = Variable(["y"], default_array)
        tu.add_encoding(variable, np.uint32, DefaultData.get_default_fill_value(np.uint32), 0.01)
        variable.attrs["standard_name"] = standard_name
        tu.add_units(variable, "count")
        return variable
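The tu (TemplateUtil) helpers used throughout these examples are not shown here. Below is a minimal sketch of what they appear to do, inferred from the calls above and from the assertions in the test examples further down; the helper names match the calls, but the bodies are assumptions, not the actual writer code.

    # Sketch only: inferred behaviour of the TemplateUtil helpers (assumption, not the real implementation).
    def add_units(variable, units):
        # store the units string as a CF attribute
        variable.attrs["units"] = units

    def add_fill_value(variable, fill_value):
        # record the fill value as an attribute on the in-memory variable
        variable.attrs["_FillValue"] = fill_value

    def add_encoding(variable, data_type, fill_value, scale_factor, offset=0.0, chunksizes=None):
        # configure how xarray packs the float data when the file is written
        variable.encoding["dtype"] = data_type
        variable.encoding["_FillValue"] = fill_value
        variable.encoding["scale_factor"] = scale_factor
        variable.encoding["add_offset"] = offset
        if chunksizes is not None:
            variable.encoding["chunksizes"] = chunksizes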
Example 2
    def _create_int32_vector(height, standard_name=None, long_name=None, orig_name=None):
        default_array = DefaultData.create_default_vector(height, np.int32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.int32))
        HIRS._set_name_attributes(long_name, orig_name, standard_name, variable)

        return variable
Example 3
    def _create_angle_variable_int(scale_factor,
                                   standard_name=None,
                                   long_name=None,
                                   unsigned=False,
                                   fill_value=None):
        default_array = DefaultData.create_default_array(TIE_SIZE,
                                                         TIE_SIZE,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y_tie", "x_tie"], default_array)

        if unsigned is True:
            data_type = np.uint16
        else:
            data_type = np.int16

        if fill_value is None:
            fill_value = DefaultData.get_default_fill_value(data_type)

        if standard_name is not None:
            variable.attrs["standard_name"] = standard_name

        if long_name is not None:
            variable.attrs["long_name"] = long_name

        tu.add_units(variable, "degree")
        variable.attrs["tie_points"] = "true"
        tu.add_encoding(variable,
                        data_type,
                        fill_value,
                        scale_factor,
                        chunksizes=CHUNKSIZES)
        return variable
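A hypothetical call, only to show how the defaults above combine (the target variable name is made up; in the source this helper is called from within a template class):

    # unsigned=True with no explicit fill value -> encoded as uint16 with the default
    # uint16 fill value; the in-memory data stay float32 NaN until the writer fills them
    dataset["solar_zenith_angle_tie"] = _create_angle_variable_int(
        0.01, standard_name="solar_zenith_angle", unsigned=True)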
Example 4
    def create_float_variable(width,
                              height,
                              standard_name=None,
                              long_name=None,
                              dim_names=None,
                              fill_value=None):
        if fill_value is None:
            default_array = DefaultData.create_default_array(
                width, height, np.float32)
        else:
            default_array = DefaultData.create_default_array(
                width, height, np.float32, fill_value=fill_value)

        if dim_names is None:
            variable = Variable(["y", "x"], default_array)
        else:
            variable = Variable(dim_names, default_array)

        if fill_value is None:
            variable.attrs["_FillValue"] = DefaultData.get_default_fill_value(
                np.float32)
        else:
            variable.attrs["_FillValue"] = fill_value

        if standard_name is not None:
            variable.attrs["standard_name"] = standard_name

        if long_name is not None:
            variable.attrs["long_name"] = long_name

        return variable
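A hypothetical use of create_float_variable with custom dimension names and an explicit fill value (the variable name, dimension names and sizes are made up for illustration):

    # 409 pixels x 12000 scanlines, stored under user-defined dimension names
    dataset["sst"] = create_float_variable(
        409, 12000,
        standard_name="sea_surface_temperature",
        dim_names=["scanline", "pixel"],
        fill_value=-32768.0)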
Example 5
    def _create_scaled_int16_vector(height, standard_name=None, original_name=None, long_name=None, scale_factor=0.01):
        default_array = DefaultData.create_default_vector(height, np.float32)
        variable = Variable(["y"], default_array)
        tu.add_encoding(variable, np.int16, DefaultData.get_default_fill_value(np.int16), scale_factor)
        HIRS._set_name_attributes(long_name, original_name, standard_name, variable)

        return variable
Example 6
    def _assert_angle_variables(self, ds):
        satellite_zenith_angle = ds.variables["satellite_zenith_angle"]
        self.assertEqual((6, ), satellite_zenith_angle.shape)
        self.assertTrue(np.isnan(satellite_zenith_angle.data[3]))
        self.assertEqual(np.uint16, satellite_zenith_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16),
                         satellite_zenith_angle.encoding['_FillValue'])
        self.assertEqual(0.01, satellite_zenith_angle.encoding['scale_factor'])
        self.assertEqual(-180.0, satellite_zenith_angle.encoding['add_offset'])
        self.assertEqual("platform_zenith_angle",
                         satellite_zenith_angle.attrs["standard_name"])
        self.assertEqual("degree", satellite_zenith_angle.attrs["units"])
        self.assertEqual("longitude latitude",
                         satellite_zenith_angle.attrs["coordinates"])

        solar_azimuth_angle = ds.variables["solar_azimuth_angle"]
        self.assertEqual((6, 56), solar_azimuth_angle.shape)
        self.assertTrue(np.isnan(solar_azimuth_angle.data[4, 4]))
        self.assertEqual(np.uint16, solar_azimuth_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16),
                         solar_azimuth_angle.encoding['_FillValue'])
        self.assertEqual(0.01, solar_azimuth_angle.encoding['scale_factor'])
        self.assertEqual(-180.0, solar_azimuth_angle.encoding['add_offset'])
        self.assertEqual(CHUNKING_2D,
                         solar_azimuth_angle.encoding['chunksizes'])
        self.assertEqual("solar_azimuth_angle",
                         solar_azimuth_angle.attrs["standard_name"])
        self.assertEqual("degree", solar_azimuth_angle.attrs["units"])
        self.assertEqual("longitude latitude",
                         solar_azimuth_angle.attrs["coordinates"])
Example 7
    def add_easy_fcdr_variables(dataset,
                                height,
                                corr_dx=None,
                                corr_dy=None,
                                lut_size=None):
        default_array = DefaultData.create_default_array_3d(SWATH_WIDTH,
                                                            height,
                                                            NUM_CHANNELS,
                                                            np.float32,
                                                            fill_value=np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        tu.add_units(variable, "K")
        variable.attrs["long_name"] = "independent uncertainty per pixel"
        dataset["u_independent_tb"] = variable

        default_array = DefaultData.create_default_array_3d(SWATH_WIDTH,
                                                            height,
                                                            NUM_CHANNELS,
                                                            np.float32,
                                                            fill_value=np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        tu.add_units(variable, "K")
        variable.attrs["long_name"] = "structured uncertainty per pixel"
        dataset["u_structured_tb"] = variable
Example 8
    def add_gridded_geolocation_variables(dataset, width, height):
        default_array = DefaultData.create_default_vector(height, np.float32, fill_value=np.NaN)
        variable = Variable(["y"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["standard_name"] = LAT_NAME
        variable.attrs["long_name"] = LAT_NAME
        variable.attrs["bounds"] = "lat_bnds"
        TemplateUtil.add_units(variable, LATITUDE_UNIT)
        dataset["lat"] = variable

        default_array = DefaultData.create_default_array(2, height, np.float32, fill_value=np.NaN)
        variable = Variable(["y", "bounds"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "latitude cell boundaries"
        TemplateUtil.add_units(variable, LATITUDE_UNIT)
        dataset["lat_bnds"] = variable

        default_array = DefaultData.create_default_vector(width, np.float32, fill_value=np.NaN)
        variable = Variable(["x"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["standard_name"] = LON_NAME
        variable.attrs["long_name"] = LON_NAME
        TemplateUtil.add_units(variable, LONGITUDE_UNIT)
        variable.attrs["bounds"] = "lon_bnds"
        dataset["lon"] = variable

        default_array = DefaultData.create_default_array(2, width, np.float32, fill_value=np.NaN)
        variable = Variable(["x", "bounds"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        TemplateUtil.add_units(variable, LONGITUDE_UNIT)
        variable.attrs["long_name"] = "longitude cell boundaries"
        dataset["lon_bnds"] = variable
Example 9
    def _create_refl_uncertainty_variable(height,
                                          long_name=None,
                                          structured=False):
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)

        tu.add_units(variable, "percent")
        tu.add_geolocation_attribute(variable)
        variable.attrs["long_name"] = long_name

        if structured:
            tu.add_encoding(variable,
                            np.int16,
                            DefaultData.get_default_fill_value(np.int16),
                            0.01,
                            chunksizes=CHUNKS_2D)
            variable.attrs["valid_min"] = 3
            variable.attrs["valid_max"] = 5
        else:
            tu.add_encoding(variable,
                            np.int16,
                            DefaultData.get_default_fill_value(np.int16),
                            0.00001,
                            chunksizes=CHUNKS_2D)
            variable.attrs["valid_max"] = 1000
            variable.attrs["valid_min"] = 10
        return variable
Example 10
    def add_original_variables(dataset, height, srf_size=None):
        # height is ignored - supplied just for interface compatibility tb 2017-07-19
        # latitude_vis
        default_array = DefaultData.create_default_array(FULL_SIZE, FULL_SIZE, np.float32, fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "latitude"
        tu.add_units(variable, "degrees_north")
        tu.add_encoding(variable, np.int16, -32768, scale_factor=0.0027466658)
        dataset["latitude_vis"] = variable

        # longitude_vis
        default_array = DefaultData.create_default_array(FULL_SIZE, FULL_SIZE, np.float32, fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "longitude"
        tu.add_units(variable, "degrees_east")
        tu.add_encoding(variable, np.int16, -32768, scale_factor=0.0054933317)
        dataset["longitude_vis"] = variable

        # latitude_ir_wv
        default_array = DefaultData.create_default_array(IR_SIZE, IR_SIZE, np.float32, fill_value=np.NaN)
        variable = Variable([IR_X_DIMENSION, IR_X_DIMENSION], default_array)
        variable.attrs["standard_name"] = "latitude"
        tu.add_units(variable, "degrees_north")
        tu.add_encoding(variable, np.int16, -32768, scale_factor=0.0027466658)
        dataset["latitude_ir_wv"] = variable

        # longitude_ir_wv
        default_array = DefaultData.create_default_array(IR_SIZE, IR_SIZE, np.float32, fill_value=np.NaN)
        variable = Variable([IR_X_DIMENSION, IR_X_DIMENSION], default_array)
        variable.attrs["standard_name"] = "longitude"
        tu.add_units(variable, "degrees_east")
        tu.add_encoding(variable, np.int16, -32768, scale_factor=0.0054933317)
        dataset["longitude_ir_wv"] = variable
Example 11
    def add_variables(dataset, width, height):
        tu.add_geolocation_variables(dataset,
                                     width,
                                     height,
                                     chunksizes=CHUNKING)
        tu.add_quality_flags(dataset, width, height, chunksizes=CHUNKING)

        default_array = DefaultData.create_default_vector(
            height, np.uint32, fill_value=4294967295)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, 4294967295)
        variable.attrs["standard_name"] = "time"
        variable.attrs[
            "long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["time"] = variable

        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["coordinates"] = "longitude latitude"
        dataset["aot"] = variable

        dataset["u_independent_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to independent effects")
        dataset["u_structured_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to structured effects")
        dataset["u_common_aot"] = tu.create_CDR_uncertainty(
            width, height, "Uncertainty of aot due to common effects")
Example 12
    def add_geolocation_variables(dataset, width, height, chunksizes=None):
        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)

        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = LAT_NAME
        TemplateUtil.add_units(variable, LATITUDE_UNIT)
        TemplateUtil.add_encoding(variable,
                                  np.int16,
                                  -32768,
                                  scale_factor=0.0027466658,
                                  chunksizes=chunksizes)
        dataset[LAT_NAME] = variable

        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = LON_NAME
        TemplateUtil.add_units(variable, LONGITUDE_UNIT)
        TemplateUtil.add_encoding(variable,
                                  np.int16,
                                  -32768,
                                  scale_factor=0.0054933317,
                                  chunksizes=chunksizes)
        dataset[LON_NAME] = variable
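The two scale factors are not arbitrary: 0.0027466658 is 90 / 32767 and 0.0054933317 is 180 / 32767, so the full latitude and longitude ranges pack onto +/-32767 in the int16 encoding while -32768 stays reserved for the fill value. A quick check of that arithmetic (plain Python, not part of the writer):

    # 90 / 32767 ~= 0.0027466658, 180 / 32767 ~= 0.0054933317
    print(90.0 / 32767, 180.0 / 32767)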
Example 13
    def assert_common_angles(self, ds, chunking=None):
        satellite_zenith_angle = ds.variables["satellite_zenith_angle"]
        self.assertEqual((6, 56), satellite_zenith_angle.shape)
        self.assertTrue(np.isnan(satellite_zenith_angle.data[2, 2]))
        self.assertEqual(np.uint16, satellite_zenith_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16), satellite_zenith_angle.encoding['_FillValue'])
        self.assertEqual(0.01, satellite_zenith_angle.encoding['scale_factor'])
        self.assertEqual(0, satellite_zenith_angle.encoding['add_offset'])
        if chunking is not None:
            self.assertEqual(chunking, satellite_zenith_angle.encoding['chunksizes'])
        self.assertEqual("platform_zenith_angle", satellite_zenith_angle.attrs["standard_name"])
        self.assertEqual("degree", satellite_zenith_angle.attrs["units"])
        self.assertEqual("longitude latitude", satellite_zenith_angle.attrs["coordinates"])
        self.assertEqual([0, 180], satellite_zenith_angle.attrs["valid_range"])

        solar_zenith_angle = ds.variables["solar_zenith_angle"]
        self.assertEqual((6, 56), solar_zenith_angle.shape)
        self.assertTrue(np.isnan(solar_zenith_angle.data[3, 3]))
        self.assertEqual(np.uint16, solar_zenith_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16), solar_zenith_angle.encoding['_FillValue'])
        self.assertEqual(0.01, solar_zenith_angle.encoding['scale_factor'])
        self.assertEqual(0, solar_zenith_angle.encoding['add_offset'])
        if chunking is not None:
            self.assertEqual(chunking, solar_zenith_angle.encoding['chunksizes'])
        self.assertEqual("solar_zenith_angle", solar_zenith_angle.attrs["standard_name"])
        self.assertEqual("solar_zenith_angle", solar_zenith_angle.attrs["orig_name"])
        self.assertEqual("degree", solar_zenith_angle.attrs["units"])
        self.assertEqual("longitude latitude", solar_zenith_angle.attrs["coordinates"])
        self.assertEqual([0, 180], solar_zenith_angle.attrs["valid_range"])

        satellite_azimuth_angle = ds.variables["satellite_azimuth_angle"]
        self.assertEqual((6, 56), satellite_azimuth_angle.shape)
        self.assertTrue(np.isnan(satellite_azimuth_angle.data[5, 5]))
        self.assertEqual(np.uint16, satellite_azimuth_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16), satellite_azimuth_angle.encoding['_FillValue'])
        self.assertEqual(0.01, satellite_azimuth_angle.encoding['scale_factor'])
        self.assertEqual(0, satellite_azimuth_angle.encoding['add_offset'])
        if chunking is not None:
            self.assertEqual(chunking, satellite_azimuth_angle.encoding['chunksizes'])
        self.assertEqual("sensor_azimuth_angle", satellite_azimuth_angle.attrs["standard_name"])
        self.assertEqual([0, 360], satellite_azimuth_angle.attrs["valid_range"])
        self.assertEqual("clockwise from north", satellite_azimuth_angle.attrs["comment"])
        self.assertEqual("degree", satellite_azimuth_angle.attrs["units"])
        self.assertEqual("longitude latitude", satellite_azimuth_angle.attrs["coordinates"])

        solar_azimuth_angle = ds.variables["solar_azimuth_angle"]
        self.assertEqual((6, 56), solar_azimuth_angle.shape)
        self.assertTrue(np.isnan(solar_azimuth_angle.data[4, 4]))
        self.assertEqual(np.uint16, solar_azimuth_angle.encoding['dtype'])
        self.assertEqual(DefaultData.get_default_fill_value(np.uint16), solar_azimuth_angle.encoding['_FillValue'])
        self.assertEqual(0.01, solar_azimuth_angle.encoding['scale_factor'])
        self.assertEqual(0, solar_azimuth_angle.encoding['add_offset'])
        if chunking is not None:
            self.assertEqual(chunking, solar_azimuth_angle.encoding['chunksizes'])
        self.assertEqual("solar_azimuth_angle", solar_azimuth_angle.attrs["standard_name"])
        self.assertEqual([0, 360], solar_azimuth_angle.attrs["valid_range"])
        self.assertEqual("clockwise from north", solar_azimuth_angle.attrs["comment"])
        self.assertEqual("degree", solar_azimuth_angle.attrs["units"])
        self.assertEqual("longitude latitude", solar_azimuth_angle.attrs["coordinates"])
Example 14
    def add_common_sensor_variables(dataset, height, srf_size):
        # scanline
        default_array = DefaultData.create_default_vector(height, np.int16)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.int16))
        variable.attrs["long_name"] = "scanline_number"
        tu.add_units(variable, "count")
        dataset["scanline"] = variable
        # time
        default_array = DefaultData.create_default_vector(height, np.uint32)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.uint32))
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        tu.add_units(variable, "s")
        dataset["time"] = variable
        # quality_scanline_bitmask
        default_array = DefaultData.create_default_vector(height, np.int32, fill_value=0)
        variable = Variable(["y"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        variable.attrs["long_name"] = "quality_indicator_bitfield"
        variable.attrs[
            "flag_masks"] = "1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864, 134217728, 268435456, 536870912 1073741824"
        variable.attrs[
            "flag_meanings"] = "do_not_use_scan time_sequence_error data_gap_preceding_scan no_calibration no_earth_location clock_update status_changed line_incomplete, time_field_bad time_field_bad_not_inf inconsistent_sequence scan_time_repeat uncalib_bad_time calib_few_scans uncalib_bad_prt calib_marginal_prt uncalib_channels uncalib_inst_mode quest_ant_black_body zero_loc bad_loc_time bad_loc_marginal bad_loc_reason bad_loc_ant reduced_context bad_temp_no_rself"
        dataset["quality_scanline_bitmask"] = variable

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function weights'
        variable.attrs["description"] = 'Per channel: weights for the relative spectral response function'
        tu.add_encoding(variable, np.int16, -32768, 0.000033)
        dataset['SRF_weights'] = variable

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_frequencies"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function wavelengths'
        variable.attrs["description"] = 'Per channel: wavelengths for the relative spectral response function'
        tu.add_encoding(variable, np.int32, -2147483648, 0.0001)
        tu.add_units(variable, "um")
        dataset['SRF_wavelengths'] = variable

        default_vector = DefaultData.create_default_vector(height, np.uint8, fill_value=255)
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable, 255)
        variable.attrs["long_name"] = 'Indicator of original file'
        variable.attrs[
            "description"] = "Indicator for mapping each line to its corresponding original level 1b file. See global attribute 'source' for the filenames. 0 corresponds to 1st listed file, 1 to 2nd file."
        dataset["scanline_map_to_origl1bfile"] = variable

        default_vector = DefaultData.create_default_vector(height, np.int16, fill_value=DefaultData.get_default_fill_value(np.int16))
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.int16))
        variable.attrs["long_name"] = 'Original_Scan_line_number'
        variable.attrs["description"] = 'Original scan line numbers from corresponding l1b records'
        dataset["scanline_origl1b"] = variable
Example 15
    def _create_easy_fcdr_variable(height, long_name):
        default_array = DefaultData.create_default_array_3d(SWATH_WIDTH, height, NUM_CHANNELS, np.float32, np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)
        tu.add_encoding(variable, np.uint16, DefaultData.get_default_fill_value(np.uint16), 0.001, chunksizes=CHUNKING_BT)
        variable.attrs["long_name"] = long_name
        tu.add_units(variable, "K")
        tu.add_geolocation_attribute(variable)
        variable.attrs["valid_min"] = 1
        variable.attrs["valid_max"] = 65534
        return variable
Example 16
    def _create_float32_vector(fill_value, height, long_name, orig_name):
        default_array = DefaultData.create_default_vector(height, np.float32, fill_value=fill_value)
        variable = Variable(["y"], default_array)
        if fill_value is None:
            tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.float32))
        else:
            tu.add_fill_value(variable, fill_value)
        variable.attrs["long_name"] = long_name
        if orig_name is not None:
            variable.attrs["orig_name"] = orig_name
        return variable
Example 17
    def _assert_correct_counts_variable(self, ds, name, long_name):
        variable = ds.variables[name]
        self.assertEqual((5, 409), variable.shape)
        self.assertEqual(DefaultData.get_default_fill_value(np.int32),
                         variable.data[3, 306])
        self.assertEqual(DefaultData.get_default_fill_value(np.int32),
                         variable.attrs["_FillValue"])
        self.assertEqual(long_name, variable.attrs["long_name"])
        self.assertEqual("count", variable.attrs["units"])
        self.assertEqual("longitude latitude", variable.attrs["coordinates"])
        self.assertEqual(CHUNKING, variable.encoding["chunksizes"])
Example 18
    def add_common_sensor_variables(dataset, height, srf_size):
        # scanline
        default_array = DefaultData.create_default_vector(height, np.int16)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.int16))
        variable.attrs["long_name"] = "scanline_number"
        tu.add_units(variable, "count")
        dataset["scanline"] = variable
        # time
        default_array = DefaultData.create_default_vector(height, np.datetime64)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, 4294967295)
        variable.attrs["standard_name"] = "time"
        variable.attrs["long_name"] = "Acquisition time in seconds since 1970-01-01 00:00:00"
        # do not set 'units' or '_FillValue' here, xarray sets these from the encoding when storing the file
        tu.add_encoding(variable, np.uint32, None, scale_factor=0.1)
        variable.encoding["units"] = "seconds since 1970-01-01 00:00:00"
        # encoding 'add_offset' varies per file and either needs to be set
        # by the user or intelligently in fiduceo.fcdr.writer.fcdr_writer.FCDRWriter.write
        dataset["time"] = variable
        # quality_scanline_bitmask
        default_array = DefaultData.create_default_vector(height, np.int32, fill_value=0)
        variable = Variable(["y"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        variable.attrs["long_name"] = "quality_indicator_bitfield"
        variable.attrs[
            "flag_masks"] = "1, 2, 4, 8, 16"
        variable.attrs["flag_meanings"] = "do_not_use_scan reduced_context bad_temp_no_rself suspect_geo suspect_time" 
        dataset["quality_scanline_bitmask"] = variable

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_wavelengths"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function weights'
        variable.attrs["description"] = 'Per channel: weights for the relative spectral response function'
        tu.add_encoding(variable, np.int16, -32768, 0.000033)
        dataset['SRF_weights'] = variable

        default_array = DefaultData.create_default_array(srf_size, NUM_CHANNELS, np.float32, fill_value=np.NaN)
        variable = Variable(["channel", "n_wavelengths"], default_array)
        variable.attrs["long_name"] = 'Spectral Response Function wavelengths'
        variable.attrs["description"] = 'Per channel: wavelengths for the relative spectral response function'
        tu.add_encoding(variable, np.int32, -2147483648, 0.0001)
        tu.add_units(variable, "um")
        dataset['SRF_wavelengths'] = variable

        default_vector = DefaultData.create_default_vector(height, np.uint8, fill_value=255)
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable, 255)
        variable.attrs["long_name"] = 'Indicator of original file'
        variable.attrs[
            "description"] = "Indicator for mapping each line to its corresponding original level 1b file. See global attribute 'source' for the filenames. 0 corresponds to 1st listed file, 1 to 2nd file."
        dataset["scanline_map_to_origl1bfile"] = variable

        default_vector = DefaultData.create_default_vector(height, np.int16, fill_value=DefaultData.get_default_fill_value(np.int16))
        variable = Variable(["y"], default_vector)
        tu.add_fill_value(variable, DefaultData.get_default_fill_value(np.int16))
        variable.attrs["long_name"] = 'Original_Scan_line_number'
        variable.attrs["description"] = 'Original scan line numbers from corresponding l1b records'
        dataset["scanline_origl1b"] = variable
Example 19
    def _create_overpass_counts_variable(height, width, description):
        fill_value = DefaultData.get_default_fill_value(np.uint8)
        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.uint8,
                                                         fill_value=fill_value)
        variable = Variable(["y", "x"], default_array)
        tu.add_fill_value(variable, fill_value)
        variable.attrs["description"] = description
        variable.attrs["coordinates"] = "lon lat"
        return variable
Example 20
    def _create_counts_variable(height, long_name):
        default_array = DefaultData.create_default_array(
            SWATH_WIDTH, height, np.int32)
        variable = Variable(["y", "x"], default_array)
        tu.add_fill_value(variable,
                          DefaultData.get_default_fill_value(np.int32))
        variable.attrs["long_name"] = long_name
        tu.add_units(variable, "count")
        tu.add_geolocation_attribute(variable)
        tu.add_chunking(variable, CHUNKS_2D)
        return variable
Example 21
    def _create_geo_angle_variable(standard_name, height, orig_name=None, chunking=None):
        default_array = DefaultData.create_default_array(SWATH_WIDTH, height, np.float32, fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = standard_name
        if orig_name is not None:
            variable.attrs["orig_name"] = orig_name

        tu.add_units(variable, "degree")
        tu.add_geolocation_attribute(variable)
        tu.add_encoding(variable, np.uint16, DefaultData.get_default_fill_value(np.uint16), 0.01, -180.0, chunking)
        return variable
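With scale_factor 0.01 and add_offset -180.0 the encoding follows the usual CF packing convention, packed = (value - add_offset) / scale_factor, so 0 degrees is stored as 18000 and the representable span of roughly -180 to 475 degrees easily covers zenith and azimuth angles. A small illustration of that arithmetic (sketch, not code from the writer):

    # CF packing/unpacking as implied by the encoding above (sketch)
    def pack(value, scale_factor=0.01, add_offset=-180.0):
        return int(round((value - add_offset) / scale_factor))

    def unpack(stored, scale_factor=0.01, add_offset=-180.0):
        return stored * scale_factor + add_offset

    assert pack(0.0) == 18000
    assert abs(unpack(pack(275.37)) - 275.37) < 0.005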
Example 22
    def add_lookup_tables(dataset, num_channels, lut_size):
        default_array = DefaultData.create_default_array(num_channels, lut_size, np.float32, fill_value=np.NaN)
        variable = Variable(["lut_size", "channel"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["description"] = "Lookup table to convert radiance to brightness temperatures"
        dataset['lookup_table_BT'] = variable

        default_array = DefaultData.create_default_array(num_channels, lut_size, np.float32, fill_value=np.NaN)
        variable = Variable(["lut_size", "channel"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["description"] = "Lookup table to convert brightness temperatures to radiance"
        dataset['lookup_table_radiance'] = variable
Example 23
    def add_correlation_coefficients(dataset, num_channels, delta_x, delta_y):
        default_array = DefaultData.create_default_array(num_channels, delta_x, np.float32, fill_value=np.NaN)
        variable = Variable(["delta_x", "channel"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "cross_element_correlation_coefficients"
        variable.attrs["description"] = "Correlation coefficients per channel for scanline correlation"
        dataset['cross_element_correlation_coefficients'] = variable

        default_array = DefaultData.create_default_array(num_channels, delta_y, np.float32, fill_value=np.NaN)
        variable = Variable(["delta_y", "channel"], default_array)
        TemplateUtil.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "cross_line_correlation_coefficients"
        variable.attrs["description"] = "Correlation coefficients per channel for inter scanline correlation"
        dataset['cross_line_correlation_coefficients'] = variable
Example 24
    def _add_angle_variables(dataset, height):
        default_array = DefaultData.create_default_vector(height,
                                                          np.float32,
                                                          fill_value=np.NaN)
        variable = Variable(["y"], default_array)
        variable.attrs["standard_name"] = "platform_zenith_angle"
        tu.add_units(variable, "degree")
        tu.add_geolocation_attribute(variable)
        tu.add_encoding(variable, np.uint16,
                        DefaultData.get_default_fill_value(np.uint16), 0.01,
                        -180.0)
        dataset["satellite_zenith_angle"] = variable

        dataset["solar_azimuth_angle"] = HIRS._create_geo_angle_variable(
            "solar_azimuth_angle", height, chunking=CHUNKING_2D)
Example 25
    def add_full_fcdr_variables(dataset, height):
        # u_btemps
        variable = AMSUB_MHS._create_3d_float_variable(height)
        variable.attrs[
            "long_name"] = "total uncertainty of brightness temperature"
        tu.add_units(variable, "K")
        dataset["u_btemps"] = variable

        # u_syst_btemps
        variable = AMSUB_MHS._create_3d_float_variable(height)
        variable.attrs[
            "long_name"] = "systematic uncertainty of brightness temperature"
        tu.add_units(variable, "K")
        dataset["u_syst_btemps"] = variable

        # u_random_btemps
        variable = AMSUB_MHS._create_3d_float_variable(height)
        variable.attrs["long_name"] = "noise on brightness temperature"
        tu.add_units(variable, "K")
        dataset["u_random_btemps"] = variable

        # u_instrtemp
        default_array = DefaultData.create_default_vector(height,
                                                          np.float32,
                                                          fill_value=np.NaN)
        variable = Variable(["y"], default_array)
        tu.add_fill_value(variable, np.NaN)
        variable.attrs["long_name"] = "uncertainty of instrument temperature"
        tu.add_units(variable, "K")
        dataset["u_instrtemp"] = variable

        # u_latitude
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "latitude", height)
        dataset["u_latitude"] = variable

        # u_longitude
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "longitude", height)
        dataset["u_longitude"] = variable

        # u_satellite_azimuth_angle
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "satellite azimuth angle", height)
        dataset["u_satellite_azimuth_angle"] = variable

        # u_satellite_zenith_angle
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "satellite zenith angle", height)
        dataset["u_satellite_zenith_angle"] = variable

        # u_solar_azimuth_angle
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "solar azimuth angle", height)
        dataset["u_solar_azimuth_angle"] = variable

        # u_solar_zenith_angle
        variable = AMSUB_MHS.create_angle_uncertainty_variable(
            "solar zenith angle", height)
        dataset["u_solar_zenith_angle"] = variable
Example 26
    def _assert_line_int32_variable(self,
                                    ds,
                                    name,
                                    standard_name=None,
                                    long_name=None,
                                    orig_name=None):
        variable = ds.variables[name]
        self.assertEqual((7, ), variable.shape)
        self.assertEqual(DefaultData.get_default_fill_value(np.int32),
                         variable.data[4])
        self.assertEqual(DefaultData.get_default_fill_value(np.int32),
                         variable.attrs["_FillValue"])
        self._assert_name_attributes(variable, standard_name, long_name,
                                     orig_name)

        return variable
Example 27
    def add_quality_flags(dataset,
                          width,
                          height,
                          chunksizes=None,
                          masks_append=None,
                          meanings_append=None):
        default_array = DefaultData.create_default_array(width,
                                                         height,
                                                         np.uint8,
                                                         fill_value=0)
        variable = Variable(["y", "x"], default_array)
        variable.attrs["standard_name"] = "status_flag"
        TemplateUtil.add_geolocation_attribute(variable)

        masks = "1, 2, 4, 8, 16, 32, 64, 128"
        if masks_append is not None:
            masks = masks + masks_append
        variable.attrs["flag_masks"] = masks

        meanings = "invalid use_with_caution invalid_input invalid_geoloc invalid_time sensor_error padded_data incomplete_channel_data"
        if meanings_append is not None:
            meanings = meanings + meanings_append
        variable.attrs["flag_meanings"] = meanings

        if chunksizes is not None:
            TemplateUtil.add_chunking(variable, chunksizes)
        dataset["quality_pixel_bitmask"] = variable
Example 28
    def test_add_geolocation_attribute(self):
        default_array = DefaultData.create_default_array_3d(
            8, 6, 4, np.float32, np.NaN)
        variable = Variable(["channel", "y", "x"], default_array)

        TemplateUtil.add_geolocation_attribute(variable)
        self.assertEqual("longitude latitude", variable.attrs["coordinates"])
Example 29
    def _create_bt_uncertainty_variable(height, long_name):
        default_array = DefaultData.create_default_array(SWATH_WIDTH,
                                                         height,
                                                         np.float32,
                                                         fill_value=np.NaN)
        variable = Variable(["y", "x"], default_array)
        tu.add_units(variable, "K")
        tu.add_geolocation_attribute(variable)
        tu.add_encoding(variable,
                        np.int16,
                        DefaultData.get_default_fill_value(np.int16),
                        0.001,
                        chunksizes=CHUNKS_2D)
        variable.attrs["valid_max"] = 15000
        variable.attrs["valid_min"] = 1
        variable.attrs["long_name"] = long_name
        return variable
Example 30
    def test__get_add_offset_missing(self):
        default_array = DefaultData.create_default_vector(2, np.float32)
        variable = Variable(["y"], default_array)
        variable.encoding = dict([('dtype', np.int8), ('_FillValue', -127),
                                  ('scale_factor', 0.023)])

        add_offset = DataUtility._get_add_offset(variable)
        self.assertEqual(0.0, add_offset)