    def test_calculate_external_single_deployment(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        nut_ds = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)

        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]
        nut_ds = nut_ds[['obs', 'time', 'deployment', 'spectral_channels',
                         'frame_type', 'nutnr_dark_value_used_for_fit']]

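        # build the source CTD stream dataset and compute its internally-derived parameters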
        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

        nut_stream_dataset = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
        nut_stream_dataset.events = self.nut_events
        nut_stream_dataset._insert_dataset(nut_ds)
        nut_stream_dataset.calculate_all()

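        # interpolate the needed CTD parameters onto the NUTNR time base, then recompute so the
        # externally-sourced products (e.g. salinity_corrected_nitrate) can be derived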
        nut_stream_dataset.interpolate_needed({self.ctdpf_sk: ctd_stream_dataset})
        nut_stream_dataset.calculate_all()

        expected_params = ['ctdpf_sbe43_sample-seawater_temperature',
                           'ctdpf_sbe43_sample-practical_salinity',
                           'salinity_corrected_nitrate']
        self.assert_parameters_in_datasets(nut_stream_dataset.datasets, expected_params)

    def test_calculate_external_multiple_deployments(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        nut_ds = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)

        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]
        nut_ds = nut_ds[['obs', 'time', 'deployment', 'spectral_channels',
                         'frame_type', 'nutnr_dark_value_used_for_fit']]

        # remap times to make this two separate deployments
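        # 864000 seconds = 10 days, so the remapped times extend into deployment 2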
        dep1_start = self.ctd_events.deps[1].ntp_start
        dep2_stop = self.ctd_events.deps[2].ntp_start + 864000
        ctd_ds.time.values = np.linspace(dep1_start + 1, dep2_stop - 1, num=ctd_ds.time.shape[0])
        nut_ds.time.values = np.linspace(dep1_start + 1, dep2_stop - 1, num=nut_ds.time.shape[0])

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

        nut_stream_dataset = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
        nut_stream_dataset.events = self.nut_events
        nut_stream_dataset._insert_dataset(nut_ds)
        nut_stream_dataset.calculate_all()

        nut_stream_dataset.interpolate_needed({self.ctdpf_sk: ctd_stream_dataset})
        nut_stream_dataset.calculate_all()

        expected_params = ['ctdpf_sbe43_sample-seawater_temperature',
                           'ctdpf_sbe43_sample-practical_salinity',
                           'salinity_corrected_nitrate']
        self.assert_parameters_in_datasets(nut_stream_dataset.datasets, expected_params)

    def test_calculate_external_12035(self):
        velpt_ds = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn),
                                   decode_times=False)
        metbk_ds = xr.open_dataset(os.path.join(DATA_DIR, self.metbk_fn),
                                   decode_times=False)

        velpt_ds = velpt_ds[[
            'obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2',
            'velocity_beam3', 'amplitude_beam1', 'amplitude_beam2',
            'amplitude_beam3'
        ]]

        metbk_ds = metbk_ds[[
            'obs', 'time', 'deployment', 'barometric_pressure',
            'relative_humidity', 'air_temperature', 'longwave_irradiance',
            'precipitation', 'sea_surface_temperature',
            'sea_surface_conductivity', 'shortwave_irradiance',
            'eastward_wind_velocity', 'northward_wind_velocity'
        ]]

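        # VELPT is supplied as the external source stream for the METBK wind and current products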
        velpt_stream_dataset = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        velpt_stream_dataset.events = self.velpt_events
        velpt_stream_dataset._insert_dataset(velpt_ds)
        velpt_stream_dataset.calculate_all()

        metbk_stream_dataset = StreamDataset(self.metbk_sk, {},
                                             [self.velpt_sk], 'UNIT')
        metbk_stream_dataset.events = self.metbk_events
        metbk_stream_dataset._insert_dataset(metbk_ds)
        metbk_stream_dataset.calculate_all()
        metbk_stream_dataset.interpolate_needed(
            {self.velpt_sk: velpt_stream_dataset})
        metbk_stream_dataset.calculate_all()

        expected_params = [
            'met_barpres',
            'met_windavg_mag_corr_east',
            'met_windavg_mag_corr_north',
            'met_current_direction',
            'met_current_speed',
            'met_relwind_direction',
            'met_relwind_speed',
            'met_netsirr',
            'met_salsurf',
            'met_spechum',
            'met_heatflx_minute',
            'met_latnflx_minute',
            'met_netlirr_minute',
            'met_sensflx_minute',
        ]
        self.assert_parameters_in_datasets(metbk_stream_dataset.datasets,
                                           expected_params)

    def test_calculate_internal_multiple_deployments(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                 decode_times=False)
        ctd_ds = ctd_ds[[
            'obs', 'time', 'deployment', 'temperature', 'pressure',
            'pressure_temp', 'conductivity', 'ext_volt0'
        ]]

        # remap times to make this two separate deployments
        dep1_start = self.ctd_events.deps[1].ntp_start
        dep2_stop = self.ctd_events.deps[2].ntp_start + 864000
        ctd_ds.time.values = np.linspace(dep1_start + 1,
                                         dep2_stop - 1,
                                         num=ctd_ds.time.shape[0])

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

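        # spot-check each deployment's derived products against direct calls to the algorithm functions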
        for deployment in ctd_stream_dataset.datasets:
            ds = ctd_stream_dataset.datasets[deployment]
            tempwat = ctd_sbe16plus_tempwat(
                ds.temperature,
                ctd_stream_dataset.events.get_cal('CC_a0', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a1', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a2', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a3', deployment)[0][2])
            np.testing.assert_array_equal(ds.seawater_temperature, tempwat)

            pracsal = ctd_pracsal(ds.seawater_conductivity,
                                  ds.seawater_temperature,
                                  ds.seawater_pressure)
            np.testing.assert_array_equal(ds.practical_salinity, pracsal)

    def test_calculate_internal_single_deployment(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                 decode_times=False)
        ctd_ds = ctd_ds[[
            'obs', 'time', 'deployment', 'temperature', 'pressure',
            'pressure_temp', 'conductivity', 'ext_volt0'
        ]]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

        for deployment in ctd_stream_dataset.datasets:
            ds = ctd_stream_dataset.datasets[deployment]
            tempwat = ctd_sbe16plus_tempwat(
                ds.temperature,
                ctd_stream_dataset.events.get_cal('CC_a0', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a1', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a2', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a3', deployment)[0][2])
            np.testing.assert_array_equal(ds.seawater_temperature, tempwat)

            pracsal = ctd_pracsal(ds.seawater_conductivity,
                                  ds.seawater_temperature,
                                  ds.seawater_pressure)
            np.testing.assert_array_equal(ds.practical_salinity, pracsal)

    def test_exclude_data(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        times = ctd_ds.time.values

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)

        ctd_stream_dataset.exclude_flagged_data()
        np.testing.assert_array_equal(times, ctd_stream_dataset.datasets[2].time.values)

        # exclude a bit
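        # annotation bounds are millisecond UNIX timestamps, hence the NTP-to-system-time conversion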
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[100]) * 1000
        anno = self._create_exclusion_anno(start, stop)
        ctd_stream_dataset.annotation_store.add_annotations([anno])

        ctd_stream_dataset.exclude_flagged_data()
        np.testing.assert_array_equal(times[101:], ctd_stream_dataset.datasets[2].time.values)

        # exclude everything
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[-1]) * 1000
        anno = self._create_exclusion_anno(start, stop)
        ctd_stream_dataset.annotation_store.add_annotations([anno])

        ctd_stream_dataset.exclude_flagged_data()
        self.assertNotIn(2, ctd_stream_dataset.datasets)

    def test_exclude_data(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        times = ctd_ds.time.values
        store = AnnotationStore()

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        
        ctd_stream_dataset.exclude_flagged_data(store)
        np.testing.assert_array_equal(times, ctd_stream_dataset.datasets[2].time.values)

        # exclude a bit
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[100]) * 1000
        anno = self._create_exclusion_anno(self.ctdpf_sk, start, stop)
        store.add_annotations([anno])

        ctd_stream_dataset.exclude_flagged_data(store)
        np.testing.assert_array_equal(times[101:], ctd_stream_dataset.datasets[2].time.values)

        # exclude everything
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[-1]) * 1000
        anno = self._create_exclusion_anno(self.ctdpf_sk, start, stop)
        store.add_annotations([anno])

        ctd_stream_dataset.exclude_flagged_data(store)
        self.assertNotIn(2, ctd_stream_dataset.datasets)

    def test_provenance_as_netcdf_attribute_missing(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = copy.deepcopy(self.ctd_events)
        ctd_stream_dataset.events.deps = {}
        ctd_stream_dataset._insert_dataset(ctd_ds)
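        # with no deployment events available, inserting instrument attributes should not raise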
        ctd_stream_dataset.insert_instrument_attributes()

    def test_fill_missing(self):
        velpt_ds = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn), decode_times=False)

        velpt_ds = velpt_ds[['obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2', 'velocity_beam3',
                             'amplitude_beam1', 'amplitude_beam2', 'amplitude_beam3']]

        velpt_stream_dataset = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        velpt_stream_dataset.events = self.velpt_events
        velpt_stream_dataset._insert_dataset(velpt_ds)
        velpt_stream_dataset.fill_missing()

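        # parameters that could not be computed should still be present, populated with fill values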
        expected_params = ['eastward_velocity']

        self.assert_parameters_in_datasets(velpt_stream_dataset.datasets, expected_params, expect_fill=True)

    def test_log_algorithm_inputs_no_result(self):
        def mock_write(self):
            return json.dumps(self.m_qdata, default=jdefault)

        uflags = {'advancedStreamEngineLogging': True, 'userName': '******'}
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, uflags, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)

        parameter = Parameter.query.get(13)
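        # replace ParameterReport.write with a stub that simply returns the report data as JSON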
        with mock.patch('util.stream_dataset.ParameterReport.write', new=mock_write):
            result = ctd_stream_dataset._log_algorithm_inputs(parameter, {}, None, self.ctdpf_sk, ctd_ds)
            self.assertIsNotNone(result)

    def test_provenance_as_netcdf_attribute(self):
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                 decode_times=False)
        ctd_ds = ctd_ds[[
            'obs', 'time', 'deployment', 'temperature', 'pressure',
            'pressure_temp', 'conductivity', 'ext_volt0'
        ]]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.insert_instrument_attributes()
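        # every deployment dataset should carry the asset management metadata as global attributes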
        for ds in ctd_stream_dataset.datasets.itervalues():
            self.assertIn('Manufacturer', ds.attrs)
            self.assertIn('ModelNumber', ds.attrs)
            self.assertIn('SerialNumber', ds.attrs)
            self.assertIn('Description', ds.attrs)
            self.assertIn('FirmwareVersion', ds.attrs)
            self.assertIn('SoftwareVersion', ds.attrs)
            self.assertIn('AssetUniqueID', ds.attrs)
            self.assertIn('Notes', ds.attrs)
            self.assertIn('Owner', ds.attrs)
            self.assertIn('RemoteResources', ds.attrs)
            self.assertIn('ShelfLifeExpirationDate', ds.attrs)
            self.assertIn('Mobile', ds.attrs)
            self.assertIn('AssetManagementRecordLastModified', ds.attrs)

            self.assertEqual(ds.attrs['Manufacturer'], 'Sea-Bird Electronics')
            self.assertEqual(ds.attrs['ModelNumber'], 'SBE 16plus V2')
            self.assertEqual(ds.attrs['SerialNumber'], '16-50112')
            self.assertEqual(ds.attrs['Description'],
                             'CTD Profiler: CTDPF Series A')
            self.assertEqual(ds.attrs['AssetUniqueID'], 'ATOSU-66662-00013')
            self.assertEqual(ds.attrs['Mobile'], 'False')
            self.assertEqual(ds.attrs['AssetManagementRecordLastModified'],
                             '2017-04-03T23:48:25.650000')

            self.assertEqual(ds.attrs['FirmwareVersion'], 'Not specified.')
            self.assertEqual(ds.attrs['SoftwareVersion'], 'Not specified.')
            self.assertEqual(ds.attrs['Notes'], 'Not specified.')
            self.assertEqual(ds.attrs['Owner'], 'Not specified.')
            self.assertEqual(ds.attrs['RemoteResources'], '[]')
            self.assertEqual(ds.attrs['ShelfLifeExpirationDate'],
                             'Not specified.')

    def fetch_raw_data(self):
        """
        Fetch the source data for this request
        :return:
        """
        # Start fetching calibration data from Asset Management
        am_events = {}
        am_futures = {}
        for stream_key in self.stream_parameters:
            refdes = '-'.join((stream_key.subsite, stream_key.node, stream_key.sensor))
            am_futures[stream_key] = self.asset_management.get_events_async(refdes)

        # Resolve calibration data futures and attach to instrument data
        for stream_key in am_futures:
            events = am_futures[stream_key].result()
            am_events[stream_key] = events

        # Start fetching instrument data
        for stream_key, stream_parameters in self.stream_parameters.iteritems():
            other_streams = set(self.stream_parameters)
            other_streams.remove(stream_key)
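            # only supporting (non-primary) streams request padded data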
            should_pad = stream_key != self.stream_key
            if not stream_key.is_virtual:
                log.debug('<%s> Fetching raw data for %s', self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams, self.request_id)
                sd.events = am_events[stream_key]
                try:
                    sd.fetch_raw_data(self.time_range, self.limit, should_pad)
                    self.datasets[stream_key] = sd
                except MissingDataException as e:
                    if stream_key == self.stream_key:
                        raise MissingDataException("Query returned no results for primary stream")
                    elif stream_key.stream in self.stream_key.stream.source_streams:
                        raise MissingDataException("Query returned no results for source stream")
                    else:
                        log.error('<%s> %s', self.request_id, e.message)

            else:
                log.debug('<%s> Creating empty dataset for virtual stream: %s',
                          self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams, self.request_id)
                sd.events = am_events[stream_key]
                self.datasets[stream_key] = sd

        # Fetch annotations
        self._insert_annotations()
        self._exclude_flagged_data()
        self._exclude_nondeployed_data()

        # Verify data still exists after masking virtual
        message = 'Query returned no results for %s stream (due to deployment or annotation mask)'
        if self.stream_key.is_virtual:
            found_streams = [stream.stream for stream in self.datasets
                             if self.datasets[stream]]
            if not any(stream in self.stream_key.stream.source_streams for stream in found_streams):
                raise MissingDataException(message % 'source')
        # real (non-virtual) stream: the primary dataset itself must contain data
        else:
            primary_stream_dataset = self.datasets[self.stream_key]
            if not primary_stream_dataset.datasets:
                raise MissingDataException(message % 'primary')

        # Remove any empty, non-virtual supporting datasets
        for stream_key in list(self.datasets):
            if not stream_key.is_virtual:
                if not self.datasets[stream_key].datasets:
                    del self.datasets[stream_key]

    def fetch_raw_data(self):
        """
        Fetch the source data for this request
        :return:
        """
        # Start fetching calibration data from Asset Management
        am_events = {}
        am_futures = {}
        for stream_key in self.stream_parameters:
            refdes = '-'.join(
                (stream_key.subsite, stream_key.node, stream_key.sensor))
            am_futures[stream_key] = self.asset_management.get_events_async(
                refdes)

        # Resolve calibration data futures and attach to instrument data
        for stream_key in am_futures:
            events = am_futures[stream_key].result()
            am_events[stream_key] = events

        # Start fetching instrument data
        for stream_key, stream_parameters in self.stream_parameters.iteritems(
        ):
            other_streams = set(self.stream_parameters)
            other_streams.remove(stream_key)
            should_pad = stream_key != self.stream_key
            if not stream_key.is_virtual:
                log.debug('<%s> Fetching raw data for %s', self.request_id,
                          stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams,
                                   self.request_id)
                sd.events = am_events[stream_key]
                try:
                    sd.fetch_raw_data(self.time_range, self.limit, should_pad)
                    self.datasets[stream_key] = sd
                except MissingDataException as e:
                    if stream_key == self.stream_key:
                        raise MissingDataException(
                            "Query returned no results for primary stream")
                    elif stream_key.stream in self.stream_key.stream.source_streams:
                        raise MissingDataException(
                            "Query returned no results for source stream")
                    else:
                        log.error('<%s> %s', self.request_id, e.message)

            else:
                log.debug('<%s> Creating empty dataset for virtual stream: %s',
                          self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams,
                                   self.request_id)
                sd.events = am_events[stream_key]
                self.datasets[stream_key] = sd

        self._exclude_flagged_data()
        self._exclude_nondeployed_data()

        # Verify data still exists after masking virtual
        message = 'Query returned no results for %s stream (due to deployment or annotation mask)'
        if self.stream_key.is_virtual:
            found_streams = [
                stream.stream for stream in self.datasets
                if self.datasets[stream]
            ]
            if not any(stream in self.stream_key.stream.source_streams
                       for stream in found_streams):
                raise MissingDataException(message % 'source')
        # real (non-virtual) stream: the primary dataset itself must contain data
        else:
            primary_stream_dataset = self.datasets[self.stream_key]
            if not primary_stream_dataset.datasets:
                raise MissingDataException(message % 'primary')

        # Remove any empty, non-virtual supporting datasets
        for stream_key in list(self.datasets):
            if not stream_key.is_virtual:
                if not self.datasets[stream_key].datasets:
                    del self.datasets[stream_key]