Example 1
 def test_dataset_with_adef_and_wrongs_dims(self):
     """Check that get_dataset raises when dims don't match the expected AreaDefinition."""
     self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                         filename_info={},
                                         filetype_info={},
                                         with_area_definition=True)
     dataset_info = {
         'name': 'test_wrong_dims',
         'file_key': 'test_values',
         'fill_value': -999
     }
     # A variable whose dims cannot be mapped to the AreaDefinition must raise
     with pytest.raises(NotImplementedError):
         self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000),
                             dataset_info)
Example 2
    def test_dataset_slicing_catid(self):
        """Check that _slice_dataset slices correctly when 'category_id' is set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        dataset_info = {
            'name': 'test_values',
            'file_key': 'test_values',
            'fill_value': -999,
            'category_id': 5
        }
        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
                                      dataset_info)
        # Only the slice for category 5 (all 8 channels) should remain
        np.testing.assert_allclose(dataset.values,
                                   self._get_unique_array(range(8), 5))
Example 3
    def test_dataset_with_invalid_filekey(self):
        """Test the correct execution of the get_dataset function with an invalid file_key."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        # A file_key that does not exist in the file should yield no dataset
        invalid_dataset = self.fh.get_dataset(
            make_dataid(name='test_invalid', resolution=32000), {
                'name': 'test_invalid',
                'file_key': 'test_invalid',
                'fill_value': -999,
            })
        # assertIsNone is the idiomatic identity check and gives a clearer
        # failure message than assertEqual(x, None)
        self.assertIsNone(invalid_dataset)
Example 4
    def setUp(self):
        """Set up the test by creating a test file and opening it with the reader.

        The file deliberately carries erroneous global attributes (wrong key
        names, wrong time format) so the reader's error handling can be
        exercised by the tests that use ``self.error_reader``.
        """
        # Easiest way to test the reader is to create a test netCDF file on the fly

        with Dataset(TEST_ERROR_FILE, 'w') as nc_err:
            # Create dimensions
            nc_err.createDimension('number_of_FoR_cols', 10)
            nc_err.createDimension('number_of_FoR_rows', 100)
            nc_err.createDimension('number_of_channels', 8)
            nc_err.createDimension('number_of_categories', 6)
            # add erroneous global attributes (intentional, to test the reader's fallbacks)
            nc_err.data_source = 'test_fci_data_source'  # Error in key name
            nc_err.platform_err = 'test_fci_platform'  # Error in key name
            nc_err.time_coverage_start = '2017092017304000'  # Error in time format
            nc_err.time_coverage_end_err = '20170920174117'  # Error in key name

            # Add datasets
            x = nc_err.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',))
            x.standard_name = 'projection_x_coordinate'
            x[:] = np.arange(10)

            y = nc_err.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',))
            # BUGFIX: this attribute was previously assigned to `x`, which
            # overwrote x's standard_name and left `y` without one.
            y.standard_name = 'projection_y_coordinate'
            y[:] = np.arange(100)

            chans = nc_err.createVariable('channels', np.float32, dimensions=('number_of_channels',))
            chans.standard_name = 'fci_channels'
            chans[:] = np.arange(8)

            cats = nc_err.createVariable('categories', np.float32, dimensions=('number_of_categories',))
            cats.standard_name = 'product_categories'
            cats[:] = np.arange(6)

            test_dataset = nc_err.createVariable('test_values', np.float32,
                                                 dimensions=('number_of_FoR_rows', 'number_of_FoR_cols',
                                                             'number_of_channels', 'number_of_categories'))
            test_dataset[:] = np.ones((100, 10, 8, 6))
            test_dataset.test_attr = 'attr'
            test_dataset.units = 'test_units'

        self.error_reader = FciL2NCSegmentFileHandler(
            filename=TEST_ERROR_FILE,
            filename_info={
                'creation_time': datetime.datetime(year=2017, month=9, day=20,
                                                   hour=12, minute=30, second=30),
            },
            filetype_info={}
        )
Example 5
    def test_dataset_slicing_irid(self):
        """Check that _slice_dataset slices correctly when 'ir_channel_id' is set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        # Rename the channel dimension so the IR-channel slicing path is taken
        self.fh.nc = self.fh.nc.rename_dims(
            {'number_of_channels': 'number_of_ir_channels'})

        dataset_info = {
            'name': 'test_values',
            'file_key': 'test_values',
            'fill_value': -999,
            'ir_channel_id': 4
        }
        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
                                      dataset_info)
        # Only IR channel 4 (all 6 categories) should remain
        np.testing.assert_allclose(dataset.values,
                                   self._get_unique_array(4, range(6)))
Example 6
    def test_dataset_with_scalar(self):
        """Check that get_dataset handles scalar values correctly."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        dataset_info = {
            'name': 'product_quality',
            'file_key': 'product_quality',
            'file_type': 'test_file_type'
        }
        # The scalar value from the file must be returned as-is
        dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), dataset_info)
        self.assertEqual(dataset.values, 99.)

        # No AreaDefinition is implemented for scalar values
        with pytest.raises(NotImplementedError):
            self.fh.get_area_def(None)
Example 7
    def test_all_basic(self):
        """Exercise the basic attributes exposed by the segment file handler."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        # Properties read from the file's global attributes
        assert self.fh.ssp_lon == 0.0
        assert self.fh.spacecraft_name == 'test_fci_platform'
        assert self.fh.sensor_name == 'test_fci_data_source'

        expected_global_attributes = {
            'filename': self.seg_test_file,
            'spacecraft_name': 'test_fci_platform',
            'ssp_lon': 0.0,
            'sensor': 'test_fci_data_source',
            'platform_name': 'test_fci_platform'
        }
        self.assertEqual(self.fh._get_global_attributes(),
                         expected_global_attributes)
Example 8
    def test_dataset(self):
        """Check that get_dataset works for a valid file_key."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={})

        dataset_info = {
            'name': 'test_values',
            'file_key': 'test_values',
            'fill_value': -999,
        }
        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
                                      dataset_info)

        # Values and attributes must come through unchanged from the file
        np.testing.assert_allclose(dataset.values,
                                   self._get_unique_array(range(8), range(6)))
        for attr, expected in (('test_attr', 'attr'),
                               ('units', 'test_units'),
                               ('fill_value', -999)):
            self.assertEqual(dataset.attrs[attr], expected)

        # Without with_area_definition no AreaDefinition is available
        with pytest.raises(NotImplementedError):
            self.fh.get_area_def(None)
Example 9
    def test_dataset_with_adef(self):
        """Check get_dataset and get_area_def when `with_area_definition=True`."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file,
                                            filename_info={},
                                            filetype_info={},
                                            with_area_definition=True)

        dataset_info = {
            'name': 'test_values',
            'file_key': 'test_values',
            'fill_value': -999,
            'coordinates': ('test_lon', 'test_lat'),
        }
        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
                                      dataset_info)

        # Values and attributes must come through unchanged from the file
        np.testing.assert_allclose(dataset.values,
                                   self._get_unique_array(range(8), range(6)))
        for attr, expected in (('test_attr', 'attr'),
                               ('units', 'test_units'),
                               ('fill_value', -999)):
            self.assertEqual(dataset.attrs[attr], expected)

        # The handler must now expose the reference segment AreaDefinition
        self.assertEqual(self.fh.get_area_def(None), SEG_AREA_DEF)