Example #1
0
    def create_nut_sr(self):
        """Build a populated StreamRequest for the nutnr stream plus its supporting CTD stream.

        Loads the sample NetCDF files, trims them to the parameters the
        request needs, wires up the two StreamDatasets with their events,
        and returns the ready-to-calculate StreamRequest.
        """
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'
        qc = json.load(open(os.path.join(DATA_DIR, 'qc.json')))

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        sr = StreamRequest(self.nut_sk, [18],
                           tr, {},
                           qc_parameters=qc,
                           request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)

        # keep only the variables the stream request actually requires
        nutnr_ds = nutnr_ds[
            self.base_params +
            [p.name for p in sr.stream_parameters[self.nut_sk]]]
        ctdpf_ds = ctdpf_ds[
            self.base_params +
            [p.name for p in sr.stream_parameters[self.ctd_sk]]]

        # each StreamDataset names the other stream as an external source
        sr.datasets[self.ctd_sk] = StreamDataset(self.ctd_sk, sr.uflags,
                                                 [self.nut_sk], sr.request_id)
        sr.datasets[self.nut_sk] = StreamDataset(self.nut_sk, sr.uflags,
                                                 [self.ctd_sk], sr.request_id)
        sr.datasets[self.ctd_sk].events = self.ctd_events
        sr.datasets[self.nut_sk].events = self.nut_events
        sr.datasets[self.ctd_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[self.nut_sk]._insert_dataset(nutnr_ds)
        return sr
Example #2
0
    def test_calculate(self):
        """End-to-end check of derived-product calculation for the nutnr stream.

        Builds a StreamRequest with calibration coefficients, runs the
        derived-product calculation, then independently recomputes seawater
        temperature and practical salinity to verify the pipeline's output.
        """
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # wrap every calibration value in a record spanning the request window
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)

        # keep only the variables the stream request actually requires
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()

        # recompute temperature directly from raw counts and the cal terms
        ds = sr.datasets[ctdpf_sk]
        tempwat = ctd_sbe16plus_tempwat(ds.datasets[0].temperature,
                                        cals['CC_a0'], cals['CC_a1'],
                                        cals['CC_a2'], cals['CC_a3'])
        np.testing.assert_array_equal(ds.datasets[0].seawater_temperature,
                                      tempwat)

        # practical salinity derives from the already-derived CTD products
        pracsal = ctd_pracsal(ds.datasets[0].seawater_conductivity,
                              ds.datasets[0].seawater_temperature,
                              ds.datasets[0].seawater_pressure)
        np.testing.assert_array_equal(ds.datasets[0].practical_salinity,
                                      pracsal)

        # the JSON response carries one record per nutnr observation
        response = json.loads(JsonResponse(sr).json())
        self.assertEqual(len(response), len(nutnr_ds.time.values))
Example #3
0
    def create_metbk_hourly_sr(self):
        """Build a StreamRequest for the virtual hourly METBK stream.

        Loads the metbk and velpt source data, corrects their deployment
        numbers, and wires up the three StreamDatasets (met, hourly, vel)
        needed to derive the hourly products. The virtual hourly stream is
        given no raw data of its own.
        """
        metbk_fn = 'metbk_a_dcl_instrument_recovered.nc'
        metbk_ds = xr.open_dataset(os.path.join(DATA_DIR, metbk_fn), decode_times=False)
        vel_fn = 'velpt_ab_dcl_instrument_recovered.nc'
        vel_ds = xr.open_dataset(os.path.join(DATA_DIR, vel_fn), decode_times=False)

        # both of these datasets are labeled deployment 3 but the times are squarely in deployment 1. Fix.
        metbk_ds.deployment.values[:] = 1
        vel_ds.deployment.values[:] = 1

        tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])

        sr = StreamRequest(self.hourly_sk, [], tr, {}, request_id='UNIT')

        # keep only the variables the stream request actually requires
        metbk_ds = metbk_ds[self.base_params + [p.name for p in sr.stream_parameters[self.met_sk]]]
        vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[self.vel_sk]]]

        # each StreamDataset names the other two streams as external sources
        sr.datasets[self.met_sk] = StreamDataset(self.met_sk, sr.uflags, [self.hourly_sk, self.vel_sk], sr.request_id)
        sr.datasets[self.hourly_sk] = StreamDataset(self.hourly_sk, sr.uflags, [self.met_sk, self.vel_sk], sr.request_id)
        sr.datasets[self.vel_sk] = StreamDataset(self.vel_sk, sr.uflags, [self.hourly_sk, self.met_sk], sr.request_id)

        # the hourly (virtual) stream reuses the met deployment events
        sr.datasets[self.hourly_sk].events = self.met_events
        sr.datasets[self.met_sk].events = self.met_events
        sr.datasets[self.vel_sk].events = self.vel_events

        sr.datasets[self.met_sk]._insert_dataset(metbk_ds)
        sr.datasets[self.vel_sk]._insert_dataset(vel_ds)
        return sr
    def test_exclude_data(self):
        """Verify exclusion annotations progressively remove flagged data.

        The deployment-2 dataset is first untouched (no annotations), then
        partially trimmed, then dropped entirely once every point is
        covered by an exclusion annotation.
        """
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        times = ctd_ds.time.values

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)

        # no annotations yet - nothing should be excluded
        ctd_stream_dataset.exclude_flagged_data()
        np.testing.assert_array_equal(times, ctd_stream_dataset.datasets[2].time.values)

        # exclude a bit
        # annotation bounds are milliseconds since the unix epoch
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[100]) * 1000
        anno = self._create_exclusion_anno(start, stop)
        ctd_stream_dataset.annotation_store.add_annotations([anno])

        ctd_stream_dataset.exclude_flagged_data()
        np.testing.assert_array_equal(times[101:], ctd_stream_dataset.datasets[2].time.values)

        # exclude everything
        start = ntplib.ntp_to_system_time(times[0]) * 1000
        stop = ntplib.ntp_to_system_time(times[-1]) * 1000
        anno = self._create_exclusion_anno(start, stop)
        ctd_stream_dataset.annotation_store.add_annotations([anno])

        # with all data excluded the deployment entry disappears entirely
        ctd_stream_dataset.exclude_flagged_data()
        self.assertNotIn(2, ctd_stream_dataset.datasets)
Example #5
0
    def test_cspp_nutnr_uses_ctd_pressure(self):
        """Verify the CSPP nutnr output picks up int_ctd_pressure from the CTD stream."""
        nut_fn = 'nutnr_j_cspp_instrument_recovered.nc'
        ctd_fn = 'ctdpf_j_cspp_instrument_recovered.nc'

        nut_ds = xr.open_dataset(os.path.join(DATA_DIR, nut_fn),
                                 decode_times=False)
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, ctd_fn),
                                 decode_times=False)

        tr = TimeRange(nut_ds.time.values[0], nut_ds.time.values[-1])
        sr = StreamRequest(self.nut_sk2, [], tr, {}, request_id='UNIT')

        # keep only the variables the stream request actually requires
        nut_ds = nut_ds[self.base_params +
                        [p.name for p in sr.stream_parameters[self.nut_sk2]]]

        sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags,
                                                  [self.nut_sk2],
                                                  sr.request_id)
        sr.datasets[self.nut_sk2] = StreamDataset(self.nut_sk2, sr.uflags,
                                                  [self.ctd_sk2],
                                                  sr.request_id)

        sr.datasets[self.ctd_sk2]._insert_dataset(ctd_ds)
        sr.datasets[self.nut_sk2]._insert_dataset(nut_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()
        sr.rename_parameters()

        # the imported CTD pressure appears in the nutnr deployment-1 dataset
        self.assertIn('int_ctd_pressure',
                      sr.datasets[self.nut_sk2].datasets[1])

        # ... and in every record of the JSON response
        data = json.loads(JsonResponse(sr).json())
        for each in data:
            self.assertIn('int_ctd_pressure', each)
Example #6
0
    def fetch_raw_data(self):
        """
        Fetch the source data for this request.

        Creates a StreamDataset per stream in the request. Missing data
        aborts the whole request only for the primary stream or one of its
        source streams; for any other stream the error is logged and the
        stream is simply omitted. Virtual streams get an empty
        StreamDataset since they have no raw data to fetch.
        :return:
        """
        for stream_key, stream_parameters in self.stream_parameters.iteritems():
            other_streams = set(self.stream_parameters)
            other_streams.remove(stream_key)
            # only supporting streams are padded beyond the request window
            should_pad = stream_key != self.stream_key
            if not stream_key.is_virtual:
                log.debug('<%s> Fetching raw data for %s', self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.coefficients, self.uflags, other_streams, self.request_id)
                try:
                    sd.fetch_raw_data(self.time_range, self.limit, should_pad)
                    self.datasets[stream_key] = sd
                except MissingDataException as e:
                    if stream_key == self.stream_key:
                        raise MissingDataException("Query returned no results for primary stream")
                    elif stream_key in self.stream_key.stream.source_streams:
                        raise MissingDataException("Query returned no results for source stream")
                    else:
                        # non-essential stream: log and continue without it
                        log.error('<%s> %s', self.request_id, e.message)

            else:
                log.debug('<%s> Creating empty dataset for virtual stream: %s',
                          self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.coefficients, self.uflags, other_streams, self.request_id)
                self.datasets[stream_key] = sd
    def test_insert_valid_scalar_data(self):
        """A 1-D array sized like the time dimension inserts successfully."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        keep = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = dataset[keep]

        zeros = np.zeros_like(dataset.time.values)
        oxygen_param = Parameter.query.get(3777)

        StreamDataset._insert_data(dataset, oxygen_param, zeros)
        self.assertIn('corrected_dissolved_oxygen', dataset)
Example #8
0
    def test_insert_valid_scalar_data(self):
        """Inserting time-aligned scalar data adds the derived variable."""
        source = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        wanted_vars = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                       'pressure_temp', 'conductivity', 'ext_volt0']
        source = source[wanted_vars]

        scalar_values = np.zeros_like(source.time.values)
        do_param = Parameter.query.get(3777)

        StreamDataset._insert_data(source, do_param, scalar_values)
        self.assertIn('corrected_dissolved_oxygen', source)
    def test_insert_valid_array_data(self):
        """A 2-D array shaped like the source variable inserts with its dims."""
        filename = 'deployment0000_RS03AXBS-LJ03A-10-ADCPTE301-streamed-adcp_velocity_beam.nc'
        dataset = xr.open_dataset(os.path.join(DATA_DIR, filename), decode_times=False)

        zeros = np.zeros_like(dataset.velocity_beam1)
        echo_param = Parameter.query.get(2769)

        StreamDataset._insert_data(dataset, echo_param, zeros)
        self.assertIn('corrected_echo_intensity_beam1', dataset)
        # the inserted variable must inherit the source's dimensions
        self.assertEqual(set(dataset.corrected_echo_intensity_beam1.dims), {'obs', 'bin'})
Example #10
0
    def test_insert_valid_array_data(self):
        """Inserting a correctly-shaped 2-D array keeps the (obs, bin) dims."""
        beam_file = 'deployment0000_RS03AXBS-LJ03A-10-ADCPTE301-streamed-adcp_velocity_beam.nc'
        beam_ds = xr.open_dataset(os.path.join(DATA_DIR, beam_file), decode_times=False)

        array_values = np.zeros_like(beam_ds.velocity_beam1)
        echo_param = Parameter.query.get(2769)

        StreamDataset._insert_data(beam_ds, echo_param, array_values)
        self.assertIn('corrected_echo_intensity_beam1', beam_ds)
        # dimensions come from the matching source variable
        self.assertEqual(set(beam_ds.corrected_echo_intensity_beam1.dims), {'obs', 'bin'})
Example #11
0
    def test_insert_bad_shape_array_data(self):
        """A 1-D array must be rejected for a parameter that needs 2-D data."""
        filename = 'deployment0000_RS03AXBS-LJ03A-10-ADCPTE301-streamed-adcp_velocity_beam.nc'
        dataset = xr.open_dataset(os.path.join(DATA_DIR, filename), decode_times=False)

        bad_data = np.zeros_like(dataset.time)
        echo_param = Parameter.query.get(2769)

        with self.assertRaises(ValueError):
            StreamDataset._insert_data(dataset, echo_param, bad_data)

        # a failed insert must not leave a partial variable behind
        self.assertNotIn('corrected_echo_intensity_beam1', dataset)
Example #12
0
    def test_add_externals(self):
        """Verify external parameters are imported only into the stream that needs them.

        After calculating derived products and importing externals, the
        CTD-derived pressure must appear in the nutnr dataset (which needs
        it) but not be duplicated back into the CTD dataset.
        """
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # wrap every calibration value in a record spanning the request window
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)
        # keep only the variables the stream request actually requires
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # the external is namespaced with its source stream's name
        self.assertIn('ctdpf_sbe43_sample-seawater_pressure',
                      sr.datasets[nutnr_sk].datasets[0])
        self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure',
                         sr.datasets[ctdpf_sk].datasets[0])

        # in the JSON response the value is exposed as int_ctd_pressure
        data = json.loads(JsonResponse(sr).json())
        for each in data:
            self.assertIn('int_ctd_pressure', each)
    def test_insert_bad_shape_array_data(self):
        """Shape-mismatched data raises ValueError and inserts nothing."""
        beam_file = 'deployment0000_RS03AXBS-LJ03A-10-ADCPTE301-streamed-adcp_velocity_beam.nc'
        beam_ds = xr.open_dataset(os.path.join(DATA_DIR, beam_file), decode_times=False)

        wrong_shape = np.zeros_like(beam_ds.time)
        echo_param = Parameter.query.get(2769)

        with self.assertRaises(ValueError):
            StreamDataset._insert_data(beam_ds, echo_param, wrong_shape)

        self.assertNotIn('corrected_echo_intensity_beam1', beam_ds)
Example #14
0
    def test_qc(self):
        """Verify QC execution adds the *_qc_executed / *_qc_results variables.

        Builds a StreamRequest with both calibration coefficients and QC
        parameters, runs the derived-product calculation, and checks the QC
        bookkeeping variables appear in the nutnr datasets.
        """
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))
        qc = json.load(open(os.path.join(DATA_DIR, 'qc.json')))

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # wrap every calibration value in a record spanning the request window
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           qc_parameters=qc,
                           request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)
        # keep only the variables the stream request actually requires
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()

        # QC adds a pair of bookkeeping variables per QC'd parameter
        expected_parameters = [
            'temp_sal_corrected_nitrate_qc_executed',
            'temp_sal_corrected_nitrate_qc_results'
        ]
        self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets,
                                           expected_parameters)
Example #15
0
    def test_insert_bad_length_data(self):
        """Data shorter than the time dimension must be rejected."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        keep = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = dataset[keep]

        too_short = np.arange(0, 100)
        oxygen_param = Parameter.query.get(3777)

        with self.assertRaises(ValueError):
            StreamDataset._insert_data(dataset, oxygen_param, too_short)

        # a failed insert must not leave a partial variable behind
        self.assertNotIn('corrected_dissolved_oxygen', dataset)
    def test_insert_bad_length_data(self):
        """Length-mismatched data raises ValueError and inserts nothing."""
        source = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        wanted_vars = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                       'pressure_temp', 'conductivity', 'ext_volt0']
        source = source[wanted_vars]

        short_values = np.arange(0, 100)
        do_param = Parameter.query.get(3777)

        with self.assertRaises(ValueError):
            StreamDataset._insert_data(source, do_param, short_values)

        self.assertNotIn('corrected_dissolved_oxygen', source)
Example #17
0
        def mock_fetch_raw_data(self):
            """Test stand-in: alternate between the CTD and echo fixtures on successive calls."""
            fixture = ctd_ds if StreamRequestTest.call_cnt % 2 == 0 else echo_ds
            sd = StreamDataset(self.stream_key, self.uflags, [], self.request_id)
            sd._insert_dataset(fixture)
            sd.events = AssetEvents('test', [])
            self.datasets[self.stream_key] = sd
            StreamRequestTest.call_cnt += 1
Example #18
0
    def test_add_externals_glider(self):
        """Verify a glider PAR request imports CTD pressure and GPS position.

        Builds a three-stream request (gps, par, ctd) and checks that after
        external import the PAR output carries int_ctd_pressure plus
        lat/lon, without duplicating the namespaced CTD variable.
        """
        gps_fn = 'deployment0003_CE05MOAS-GL319-00-ENG000000-recovered_host-glider_gps_position.nc'
        par_fn = 'deployment0003_CE05MOAS-GL319-01-PARADM000-recovered_host-parad_m_glider_recovered.nc'
        ctd_fn = 'deployment0003_CE05MOAS-GL319-05-CTDGVM000-recovered_host-ctdgv_m_glider_instrument_recovered.nc'

        gps_sk = StreamKey('CE05MOAS', 'GL319', '00-ENG000000', 'recovered_host', 'glider_gps_position')
        par_sk = StreamKey('CE05MOAS', 'GL319', '01-PARADM000', 'recovered_host', 'parad_m_glider_recovered')
        ctd_sk = StreamKey('CE05MOAS', 'GL319', '05-CTDGVM000', 'recovered_host', 'ctdgv_m_glider_instrument_recovered')

        # Fetch the source data
        gps_ds = xr.open_dataset(os.path.join(DATA_DIR, gps_fn), decode_times=False)
        par_ds = xr.open_dataset(os.path.join(DATA_DIR, par_fn), decode_times=False)
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, ctd_fn), decode_times=False)

        # Create the stream request
        tr = TimeRange(par_ds.time.values[0], par_ds.time.values[-1])
        sr = StreamRequest(par_sk, [], tr, {}, request_id='UNIT')

        # Filter the source data to just the data the stream request says we need
        gps_ds = gps_ds[self.base_params + [p.name for p in sr.stream_parameters[gps_sk]]]
        par_ds = par_ds[self.base_params + [p.name for p in sr.stream_parameters[par_sk]]]
        ctd_ds = ctd_ds[self.base_params + [p.name for p in sr.stream_parameters[ctd_sk]]]

        # Create the StreamDataset objects
        sr.datasets[gps_sk] = StreamDataset(gps_sk, sr.uflags, [par_sk, ctd_sk], sr.request_id)
        sr.datasets[par_sk] = StreamDataset(par_sk, sr.uflags, [gps_sk, ctd_sk], sr.request_id)
        sr.datasets[ctd_sk] = StreamDataset(ctd_sk, sr.uflags, [par_sk, gps_sk], sr.request_id)

        sr.datasets[gps_sk].events = self.get_events(gps_sk)
        sr.datasets[par_sk].events = self.get_events(par_sk)
        sr.datasets[ctd_sk].events = self.get_events(ctd_sk)

        # Insert the source data
        sr.datasets[gps_sk]._insert_dataset(gps_ds)
        sr.datasets[par_sk]._insert_dataset(par_ds)
        sr.datasets[ctd_sk]._insert_dataset(ctd_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # Ticket 9328: int_ctd_pressure is now set in stream_request.import_extra_externals()
        self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[par_sk].datasets[3])
        self.assertIn('int_ctd_pressure', sr.datasets[par_sk].datasets[3])
        self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[ctd_sk].datasets[3])

        # every JSON record should carry pressure and GPS position
        data = json.loads(JsonResponse(sr).json())
        for each in data:
            self.assertIn('int_ctd_pressure', each)
            self.assertIn('lat', each)
            self.assertIn('lon', each)
Example #19
0
    def test_csv(self):
        """Smoke test: a calculated StreamRequest renders to non-empty CSV."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # wrap every calibration value in a record spanning the request window
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)

        # keep only the variables the stream request actually requires
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        # comma-delimited output; only assert that something was produced
        csv = CsvGenerator(sr, ',').to_csv()
        self.assertTrue(csv)
    def test_log_algorithm_inputs_no_result(self):
        """_log_algorithm_inputs must still produce a report when the result is None."""
        def mock_write(self):
            # capture the report payload instead of writing it to disk
            return json.dumps(self.m_qdata, default=jdefault)

        # advanced logging must be enabled for the report to be generated
        uflags = {'advancedStreamEngineLogging': True, 'userName': '******'}
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, uflags, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)

        parameter = Parameter.query.get(13)
        with mock.patch('util.stream_dataset.ParameterReport.write', new=mock_write):
            result = ctd_stream_dataset._log_algorithm_inputs(parameter, {}, None, self.ctdpf_sk, ctd_ds)
            self.assertIsNotNone(result)
Example #21
0
    def test_calculate_internal_single_deployment(self):
        """Verify calculate_all derives temperature and salinity per deployment.

        Recomputes the expected values directly from the raw data and the
        deployment's calibration records and compares array-for-array.
        """
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                 decode_times=False)
        ctd_ds = ctd_ds[[
            'obs', 'time', 'deployment', 'temperature', 'pressure',
            'pressure_temp', 'conductivity', 'ext_volt0'
        ]]

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

        for deployment in ctd_stream_dataset.datasets:
            ds = ctd_stream_dataset.datasets[deployment]
            # get_cal returns records; [0][2] is the first record's value
            tempwat = ctd_sbe16plus_tempwat(
                ds.temperature,
                ctd_stream_dataset.events.get_cal('CC_a0', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a1', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a2', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a3', deployment)[0][2])
            np.testing.assert_array_equal(ds.seawater_temperature, tempwat)

            pracsal = ctd_pracsal(ds.seawater_conductivity,
                                  ds.seawater_temperature,
                                  ds.seawater_pressure)
            np.testing.assert_array_equal(ds.practical_salinity, pracsal)
Example #22
0
    def test_botpt_15s(self):
        """Verify the virtual botpt 15-second stream derives its products.

        Only the source (nano sample) stream receives raw data; the 15s
        stream is virtual and must be populated entirely by
        calculate_derived_products.
        """
        botpt_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed',
                             'botpt_nano_sample')
        botpt_15s_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302',
                                 'streamed', 'botpt_nano_sample_15s')
        botpt_fn = 'deployment0001_RS03ECAL-MJ03E-06-BOTPTA302-streamed-botpt_nano_sample.nc'

        cals = json.load(open(os.path.join(DATA_DIR, 'cals.json')))

        tr = TimeRange(3674160000.0, 3674181600.1)
        # wrap every calibration value in a record spanning the request window
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(botpt_15s_sk, [],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        botps_ds = xr.open_dataset(os.path.join(DATA_DIR, botpt_fn),
                                   decode_times=False)

        # keep only the variables the stream request actually requires
        botps_ds = botps_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[botpt_sk]]]

        sr.datasets[botpt_sk] = StreamDataset(botpt_sk, sr.coefficients,
                                              sr.uflags, [], sr.request_id)
        sr.datasets[botpt_15s_sk] = StreamDataset(botpt_15s_sk,
                                                  sr.coefficients, sr.uflags,
                                                  [botpt_sk], sr.request_id)
        # only the source stream gets raw data; the 15s stream is virtual
        sr.datasets[botpt_sk]._insert_dataset(botps_ds)

        sr.calculate_derived_products()

        # all botsflu products should be present for deployment 1
        result = sr.datasets[botpt_15s_sk].datasets[1]
        self.assertIn('botsflu_time15s', result)
        self.assertIn('botsflu_meanpres', result)
        self.assertIn('botsflu_meandepth', result)
        self.assertIn('botsflu_5minrate', result)
        self.assertIn('botsflu_10minrate', result)
        self.assertIn('botsflu_predtide', result)
Example #23
0
    def test_calculate_internal_multiple_deployments(self):
        """Verify calculate_all handles data spanning more than one deployment.

        The sample times are remapped so the data straddles two
        deployments, then each per-deployment dataset is checked against a
        direct recomputation using that deployment's calibration records.
        """
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                 decode_times=False)
        ctd_ds = ctd_ds[[
            'obs', 'time', 'deployment', 'temperature', 'pressure',
            'pressure_temp', 'conductivity', 'ext_volt0'
        ]]

        # remap times to make this two separate deployments
        dep1_start = self.ctd_events.deps[1].ntp_start
        dep2_stop = self.ctd_events.deps[2].ntp_start + 864000
        ctd_ds.time.values = np.linspace(dep1_start + 1,
                                         dep2_stop - 1,
                                         num=ctd_ds.time.shape[0])

        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_stream_dataset.events = self.ctd_events
        ctd_stream_dataset._insert_dataset(ctd_ds)
        ctd_stream_dataset.calculate_all()

        for deployment in ctd_stream_dataset.datasets:
            ds = ctd_stream_dataset.datasets[deployment]
            # get_cal returns records; [0][2] is the first record's value
            tempwat = ctd_sbe16plus_tempwat(
                ds.temperature,
                ctd_stream_dataset.events.get_cal('CC_a0', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a1', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a2', deployment)[0][2],
                ctd_stream_dataset.events.get_cal('CC_a3', deployment)[0][2])
            np.testing.assert_array_equal(ds.seawater_temperature, tempwat)

            pracsal = ctd_pracsal(ds.seawater_conductivity,
                                  ds.seawater_temperature,
                                  ds.seawater_pressure)
            np.testing.assert_array_equal(ds.practical_salinity, pracsal)
Example #24
0
    def test_provenance_as_netcdf_attribute_missing(self):
        """insert_instrument_attributes must tolerate missing deployment events."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        wanted_vars = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                       'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = dataset[wanted_vars]

        stream_ds = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        events = copy.deepcopy(self.ctd_events)
        events.deps = {}  # simulate absent deployment information
        stream_ds.events = events
        stream_ds._insert_dataset(dataset)
        # should not raise despite the empty deployment map
        stream_ds.insert_instrument_attributes()
Example #25
0
    def test_log_algorithm_inputs_no_result(self):
        """_log_algorithm_inputs must still produce a report when the result is None."""
        def mock_write(self):
            # capture the report payload instead of writing it to disk
            return json.dumps(self.m_qdata, default=jdefault)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # wrap every calibration value in a record spanning the request window
        coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': v, 'deployment': 1}]
                        for k, v in self.ctd_nutnr_cals.iteritems()}
        coefficients = CalibrationCoefficientStore(coefficients, 'UNIT')

        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        ctd_ds = ctd_ds[['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']]

        # advanced logging must be enabled for the report to be generated
        uflags = {'advancedStreamEngineLogging': True, 'userName': '******'}
        ctd_stream_dataset = StreamDataset(self.ctdpf_sk, coefficients, uflags, [], 'UNIT')
        ctd_stream_dataset._insert_dataset(ctd_ds)

        parameter = Parameter.query.get(911)
        with mock.patch('util.stream_dataset.ParameterReport.write', new=mock_write):
            result = ctd_stream_dataset._log_algorithm_inputs(parameter, {}, None, self.ctdpf_sk, ctd_ds)
            self.assertIsNotNone(result)
Пример #26
0
    def fetch_raw_data(self):
        """
        Fetch the source data for this request.

        For each stream in the request, build a StreamDataset. Non-virtual
        streams are populated with raw data; virtual streams get an empty
        dataset (they are computed from their source streams later).
        :return:
        """
        for stream_key, stream_parameters in self.stream_parameters.iteritems(
        ):
            other_streams = set(self.stream_parameters)
            other_streams.remove(stream_key)
            # only secondary (non-primary) streams are padded beyond the time range
            should_pad = stream_key != self.stream_key
            if not stream_key.is_virtual:
                log.debug('<%s> Fetching raw data for %s', self.request_id,
                          stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.coefficients, self.uflags,
                                   other_streams, self.request_id)
                try:
                    sd.fetch_raw_data(self.time_range, self.limit, should_pad)
                    self.datasets[stream_key] = sd
                except MissingDataException as e:
                    if stream_key == self.stream_key:
                        raise MissingDataException(
                            "Query returned no results for primary stream")
                    # compare the Stream (not the StreamKey) against
                    # source_streams, which holds Stream objects
                    elif stream_key.stream in self.stream_key.stream.source_streams:
                        raise MissingDataException(
                            "Query returned no results for source stream")
                    else:
                        log.error('<%s> %s', self.request_id, e.message)

            else:
                log.debug('<%s> Creating empty dataset for virtual stream: %s',
                          self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.coefficients, self.uflags,
                                   other_streams, self.request_id)
                self.datasets[stream_key] = sd
    def test_calculate_internal_multiple_deployments(self):
        """Derived products are computed per-deployment with the matching cals."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        dataset = dataset[['obs', 'time', 'deployment', 'temperature', 'pressure',
                           'pressure_temp', 'conductivity', 'ext_volt0']]

        # remap times to make this two separate deployments
        start = self.ctd_events.deps[1].ntp_start
        stop = self.ctd_events.deps[2].ntp_start + 864000
        dataset.time.values = np.linspace(start + 1, stop - 1, num=dataset.time.shape[0])

        stream_ds = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        stream_ds.events = self.ctd_events
        stream_ds._insert_dataset(dataset)
        stream_ds.calculate_all()

        for dep, ds in stream_ds.datasets.items():
            a0, a1, a2, a3 = [stream_ds.events.get_cal(cc, dep)[0][2]
                              for cc in ('CC_a0', 'CC_a1', 'CC_a2', 'CC_a3')]
            np.testing.assert_array_equal(
                ds.seawater_temperature,
                ctd_sbe16plus_tempwat(ds.temperature, a0, a1, a2, a3))

            np.testing.assert_array_equal(
                ds.practical_salinity,
                ctd_pracsal(ds.seawater_conductivity, ds.seawater_temperature,
                            ds.seawater_pressure))
Пример #28
0
    def test_calculate_internal_multiple_deployments(self):
        """Internal calculation covers both deployments using per-deployment cals."""
        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        cal_map = {}
        for name, value in self.ctd_nutnr_cals.iteritems():
            base = {'start': tr.start - 1, 'stop': tr.stop + 1, 'value': value}
            cal_map[name] = [dict(base, deployment=1), dict(base, deployment=2)]

        store = CalibrationCoefficientStore(cal_map, 'UNIT')

        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        dataset = dataset[['obs', 'time', 'deployment', 'temperature', 'pressure',
                           'pressure_temp', 'conductivity', 'ext_volt0']]

        # split the records across two deployments
        dataset.deployment.values[:100000] = 1
        dataset.deployment.values[100000:] = 2

        stream_ds = StreamDataset(self.ctdpf_sk, store, {}, [], 'UNIT')
        stream_ds._insert_dataset(dataset)
        stream_ds.calculate_internal()

        cals = self.ctd_nutnr_cals
        for ds in stream_ds.datasets.itervalues():
            expected_temp = ctd_sbe16plus_tempwat(ds.temperature,
                                                  cals['CC_a0'], cals['CC_a1'],
                                                  cals['CC_a2'], cals['CC_a3'])
            np.testing.assert_array_equal(ds.seawater_temperature, expected_temp)

            expected_sal = ctd_pracsal(ds.seawater_conductivity,
                                       ds.seawater_temperature,
                                       ds.seawater_pressure)
            np.testing.assert_array_equal(ds.practical_salinity, expected_sal)
Пример #29
0
    def test_pressure_depth_renamed(self):
        """After renaming, pressure_depth appears as 'pressure' in datasets and JSON."""
        dosta_file = 'dosta_abcdjm_cspp_instrument_recovered.nc'
        ctd_file = 'ctdpf_j_cspp_instrument_recovered.nc'

        dosta_ds = xr.open_dataset(os.path.join(DATA_DIR, dosta_file), decode_times=False)
        ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, ctd_file), decode_times=False)

        tr = TimeRange(dosta_ds.time.values[0], dosta_ds.time.values[-1])
        sr = StreamRequest(self.dosta_sk, [], tr, {}, request_id='UNIT')

        keep = self.base_params + [p.name for p in sr.stream_parameters[self.dosta_sk]]
        dosta_ds = dosta_ds[keep]

        sr.datasets[self.dosta_sk] = StreamDataset(self.dosta_sk, sr.uflags,
                                                   [self.ctd_sk2], sr.request_id)
        sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags,
                                                  [self.dosta_sk], sr.request_id)

        sr.datasets[self.dosta_sk]._insert_dataset(dosta_ds)
        sr.datasets[self.ctd_sk2]._insert_dataset(ctd_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()
        sr.rename_parameters()

        dosta_dep1 = sr.datasets[self.dosta_sk].datasets[1]
        self.assertNotIn('pressure_depth', dosta_dep1)
        self.assertIn('pressure', dosta_dep1)

        for record in json.loads(JsonResponse(sr).json()):
            self.assertNotIn('pressure_depth', record)
            self.assertIn('pressure', record)
Пример #30
0
    def create_echo_sounding_sr(self, parameters=None):
        """Build a StreamRequest over the echo_sounding test data (both deployments)."""
        if parameters is None:
            parameters = []
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'),
                                  decode_times=False)
        # somehow the times in this dataset are corrupted. Remap to valid times
        # spanning both deployments
        start = self.hpies_events.deps[1].ntp_start
        stop = self.hpies_events.deps[2].ntp_start + 864000
        echo_ds.time.values = np.linspace(start + 1, stop - 1, num=echo_ds.time.shape[0])

        sr = StreamRequest(self.echo_sk, parameters, TimeRange(start, stop), {},
                           request_id='UNIT')
        sr.datasets[self.echo_sk] = StreamDataset(self.echo_sk, sr.uflags, [], sr.request_id)
        sr.datasets[self.echo_sk].events = self.hpies_events
        sr.datasets[self.echo_sk]._insert_dataset(echo_ds)
        return sr
    def test_provenance_as_netcdf_attribute_missing(self):
        """insert_instrument_attributes tolerates an empty deployment table."""
        columns = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                   'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                  decode_times=False)[columns]

        sd = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        sd.events = copy.deepcopy(self.ctd_events)
        sd.events.deps = {}  # no deployments on record
        sd._insert_dataset(dataset)
        sd.insert_instrument_attributes()
Пример #32
0
    def test_fill_missing(self):
        """Parameters that cannot be computed are inserted as fill values."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn), decode_times=False)

        dataset = dataset[['obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2',
                           'velocity_beam3', 'amplitude_beam1', 'amplitude_beam2',
                           'amplitude_beam3']]

        sd = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        sd.events = self.velpt_events
        sd._insert_dataset(dataset)
        sd.fill_missing()

        self.assert_parameters_in_datasets(sd.datasets, ['eastward_velocity'],
                                           expect_fill=True)
    def test_fill_missing(self):
        """Uncomputable parameters are present in the output, filled with fill values."""
        columns = ['obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2',
                   'velocity_beam3', 'amplitude_beam1', 'amplitude_beam2',
                   'amplitude_beam3']
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn),
                                  decode_times=False)[columns]

        stream_ds = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        stream_ds.events = self.velpt_events
        stream_ds._insert_dataset(dataset)
        stream_ds.fill_missing()

        expected = ['eastward_velocity']
        self.assert_parameters_in_datasets(stream_ds.datasets, expected, expect_fill=True)
Пример #34
0
    def test_provenance_as_netcdf_attribute(self):
        """Asset management metadata is attached to each dataset as global attributes."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                  decode_times=False)
        dataset = dataset[['obs', 'time', 'deployment', 'temperature', 'pressure',
                           'pressure_temp', 'conductivity', 'ext_volt0']]

        sd = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        sd.events = self.ctd_events
        sd._insert_dataset(dataset)
        sd.insert_instrument_attributes()

        expected_keys = ['Manufacturer', 'ModelNumber', 'SerialNumber', 'Description',
                         'FirmwareVersion', 'SoftwareVersion', 'AssetUniqueID', 'Notes',
                         'Owner', 'RemoteResources', 'ShelfLifeExpirationDate', 'Mobile',
                         'AssetManagementRecordLastModified']
        expected_values = [
            ('Manufacturer', 'Sea-Bird Electronics'),
            ('ModelNumber', 'SBE 16plus V2'),
            ('SerialNumber', '16-50112'),
            ('Description', 'CTD Profiler: CTDPF Series A'),
            ('AssetUniqueID', 'ATOSU-66662-00013'),
            ('Mobile', 'False'),
            ('AssetManagementRecordLastModified', '2017-04-03T23:48:25.650000'),
            # fields absent from asset management fall back to placeholders
            ('FirmwareVersion', 'Not specified.'),
            ('SoftwareVersion', 'Not specified.'),
            ('Notes', 'Not specified.'),
            ('Owner', 'Not specified.'),
            ('RemoteResources', '[]'),
            ('ShelfLifeExpirationDate', 'Not specified.'),
        ]
        for ds in sd.datasets.itervalues():
            for key in expected_keys:
                self.assertIn(key, ds.attrs)
            for key, value in expected_values:
                self.assertEqual(ds.attrs[key], value)
Пример #35
0
    def test_function_map_scalar(self):
        """Scalar function-map parameters are computed for the echo_sounding stream."""
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'),
                                  decode_times=False)
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed',
                            'echo_sounding')
        tr = TimeRange(0, 99999999)
        # NOTE(review): extra positional {} arg differs from other StreamRequest
        # call sites in this suite — confirm against the constructor signature
        sr = StreamRequest(echo_sk, [], {}, tr, {}, request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients,
                                             sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)
        sr.calculate_derived_products()
        sr._add_location()

        wanted = {
            'hpies_travel_time1_L1', 'hpies_travel_time2_L1',
            'hpies_travel_time3_L1', 'hpies_travel_time4_L1',
            'hpies_bliley_temperature_L1', 'hpies_pressure_L1'
        }
        self.assertSetEqual(wanted.difference(sr.datasets[echo_sk].datasets[0]), set())
Пример #36
0
    def test_log_algorithm_inputs_no_result(self):
        """A report is produced even when the algorithm yields no result."""
        def mock_write(self):
            return json.dumps(self.m_qdata, default=jdefault)

        flags = {'advancedStreamEngineLogging': True, 'userName': '******'}
        columns = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                   'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                  decode_times=False)[columns]

        stream_ds = StreamDataset(self.ctdpf_sk, flags, [], 'UNIT')
        stream_ds.events = self.ctd_events
        stream_ds._insert_dataset(dataset)

        param = Parameter.query.get(13)
        with mock.patch('util.stream_dataset.ParameterReport.write', new=mock_write):
            self.assertIsNotNone(
                stream_ds._log_algorithm_inputs(param, {}, None, self.ctdpf_sk, dataset))
Пример #37
0
    def test_add_location(self):
        """lat/lon from location_information are applied per deployment."""
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'),
                                  decode_times=False)
        # split the records between two deployments
        echo_ds.deployment.values[:20] = 1
        echo_ds.deployment.values[20:] = 2
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed',
                            'echo_sounding')
        location_info = {
            echo_sk.as_three_part_refdes(): [
                {'deployment': 1, 'lat': 1, 'lon': 5},
                {'deployment': 2, 'lat': 2, 'lon': 6},
            ]
        }
        tr = TimeRange(0, 99999999)
        sr = StreamRequest(echo_sk, [], {},
                           tr, {},
                           location_information=location_info,
                           request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients,
                                             sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)

        sr.calculate_derived_products()
        sr._add_location()

        result = sr.datasets[echo_sk]

        for deployment, lat, lon in [(1, 1.0, 5.0), (2, 2.0, 6.0)]:
            dep_ds = result.datasets[deployment]
            self.assertSetEqual(set(np.unique(dep_ds.lat.values)), {lat})
            self.assertSetEqual(set(np.unique(dep_ds.lon.values)), {lon})
    def test_provenance_as_netcdf_attribute(self):
        """Each per-deployment dataset carries asset-management global attributes."""
        columns = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                   'pressure_temp', 'conductivity', 'ext_volt0']
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                  decode_times=False)[columns]

        stream_ds = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        stream_ds.events = self.ctd_events
        stream_ds._insert_dataset(dataset)
        stream_ds.insert_instrument_attributes()

        present = ['Manufacturer', 'ModelNumber', 'SerialNumber', 'Description',
                   'FirmwareVersion', 'SoftwareVersion', 'AssetUniqueID', 'Notes',
                   'Owner', 'RemoteResources', 'ShelfLifeExpirationDate', 'Mobile',
                   'AssetManagementRecordLastModified']
        values = [
            ('Manufacturer', 'Sea-Bird Electronics'),
            ('ModelNumber', 'SBE 16plus V2'),
            ('SerialNumber', '16-50112'),
            ('Description', 'CTD Profiler: CTDPF Series A'),
            ('AssetUniqueID', 'ATOSU-66662-00013'),
            ('Mobile', 'False'),
            ('AssetManagementRecordLastModified', '2017-04-03T23:48:25.650000'),
            # unspecified fields fall back to placeholder strings
            ('FirmwareVersion', 'Not specified.'),
            ('SoftwareVersion', 'Not specified.'),
            ('Notes', 'Not specified.'),
            ('Owner', 'Not specified.'),
            ('RemoteResources', '[]'),
            ('ShelfLifeExpirationDate', 'Not specified.'),
        ]
        for ds in stream_ds.datasets.itervalues():
            for key in present:
                self.assertIn(key, ds.attrs)
            for key, expected in values:
                self.assertEqual(ds.attrs[key], expected)
    def test_calculate_internal_single_deployment(self):
        """Derived seawater products match direct function evaluation."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        dataset = dataset[['obs', 'time', 'deployment', 'temperature', 'pressure',
                           'pressure_temp', 'conductivity', 'ext_volt0']]

        stream_ds = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        stream_ds.events = self.ctd_events
        stream_ds._insert_dataset(dataset)
        stream_ds.calculate_all()

        for dep, ds in stream_ds.datasets.items():
            a0, a1, a2, a3 = [stream_ds.events.get_cal(cc, dep)[0][2]
                              for cc in ('CC_a0', 'CC_a1', 'CC_a2', 'CC_a3')]
            np.testing.assert_array_equal(
                ds.seawater_temperature,
                ctd_sbe16plus_tempwat(ds.temperature, a0, a1, a2, a3))

            np.testing.assert_array_equal(
                ds.practical_salinity,
                ctd_pracsal(ds.seawater_conductivity, ds.seawater_temperature,
                            ds.seawater_pressure))
    def test_calculate_external_single_deployment(self):
        """NUTNR external products are computed after interpolating CTD data."""
        ctd_cols = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                    'pressure_temp', 'conductivity', 'ext_volt0']
        nut_cols = ['obs', 'time', 'deployment', 'spectral_channels',
                    'frame_type', 'nutnr_dark_value_used_for_fit']
        ctd_data = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                   decode_times=False)[ctd_cols]
        nut_data = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn),
                                   decode_times=False)[nut_cols]

        ctd_sd = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_sd.events = self.ctd_events
        ctd_sd._insert_dataset(ctd_data)
        ctd_sd.calculate_all()

        nut_sd = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
        nut_sd.events = self.nut_events
        nut_sd._insert_dataset(nut_data)
        nut_sd.calculate_all()

        # pull interpolated CTD parameters in, then recompute external products
        nut_sd.interpolate_needed({self.ctdpf_sk: ctd_sd})
        nut_sd.calculate_all()

        self.assert_parameters_in_datasets(
            nut_sd.datasets,
            ['ctdpf_sbe43_sample-seawater_temperature',
             'ctdpf_sbe43_sample-practical_salinity',
             'salinity_corrected_nitrate'])
    def test_calculate_external_12035(self):
        """METBK derived products resolve after interpolating VELPT data (issue 12035)."""
        velpt_cols = ['obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2',
                      'velocity_beam3', 'amplitude_beam1', 'amplitude_beam2',
                      'amplitude_beam3']
        metbk_cols = ['obs', 'time', 'deployment', 'barometric_pressure',
                      'relative_humidity', 'air_temperature', 'longwave_irradiance',
                      'precipitation', 'sea_surface_temperature',
                      'sea_surface_conductivity', 'shortwave_irradiance',
                      'eastward_wind_velocity', 'northward_wind_velocity']

        velpt_data = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn),
                                     decode_times=False)[velpt_cols]
        metbk_data = xr.open_dataset(os.path.join(DATA_DIR, self.metbk_fn),
                                     decode_times=False)[metbk_cols]

        velpt_sd = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        velpt_sd.events = self.velpt_events
        velpt_sd._insert_dataset(velpt_data)
        velpt_sd.calculate_all()

        metbk_sd = StreamDataset(self.metbk_sk, {}, [self.velpt_sk], 'UNIT')
        metbk_sd.events = self.metbk_events
        metbk_sd._insert_dataset(metbk_data)
        metbk_sd.calculate_all()
        # second pass computes products that needed the interpolated VELPT values
        metbk_sd.interpolate_needed({self.velpt_sk: velpt_sd})
        metbk_sd.calculate_all()

        expected = ['met_barpres',
                    'met_windavg_mag_corr_east',
                    'met_windavg_mag_corr_north',
                    'met_current_direction',
                    'met_current_speed',
                    'met_relwind_direction',
                    'met_relwind_speed',
                    'met_netsirr',
                    'met_salsurf',
                    'met_spechum',
                    'met_heatflx_minute',
                    'met_latnflx_minute',
                    'met_netlirr_minute',
                    'met_sensflx_minute',
                    ]
        self.assert_parameters_in_datasets(metbk_sd.datasets, expected)
Пример #42
0
    def test_exclude_data(self):
        """Annotation-based exclusion trims, then removes, the deployment dataset."""
        dataset = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                  decode_times=False)
        dataset = dataset[['obs', 'time', 'deployment', 'temperature', 'pressure',
                           'pressure_temp', 'conductivity', 'ext_volt0']]

        times = dataset.time.values
        store = AnnotationStore()

        stream_ds = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        stream_ds.events = self.ctd_events
        stream_ds._insert_dataset(dataset)

        # no annotations yet: nothing is excluded
        stream_ds.exclude_flagged_data(store)
        np.testing.assert_array_equal(times, stream_ds.datasets[2].time.values)

        def add_exclusion(first, last):
            # annotation bounds are in system time, milliseconds
            start = ntplib.ntp_to_system_time(first) * 1000
            stop = ntplib.ntp_to_system_time(last) * 1000
            store.add_annotations([self._create_exclusion_anno(self.ctdpf_sk, start, stop)])

        # exclude a bit
        add_exclusion(times[0], times[100])
        stream_ds.exclude_flagged_data(store)
        np.testing.assert_array_equal(times[101:], stream_ds.datasets[2].time.values)

        # exclude everything
        add_exclusion(times[0], times[-1])
        stream_ds.exclude_flagged_data(store)
        self.assertNotIn(2, stream_ds.datasets)
Пример #43
0
    def test_calculate_external_multiple_deployments(self):
        """External NUTNR products resolve across two deployments of CTD data."""
        ctd_cols = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                    'pressure_temp', 'conductivity', 'ext_volt0']
        nut_cols = ['obs', 'time', 'deployment', 'spectral_channels', 'frame_type',
                    'nutnr_dark_value_used_for_fit']
        ctd_data = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn),
                                   decode_times=False)[ctd_cols]
        nut_data = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn),
                                   decode_times=False)[nut_cols]

        # remap times to make this two separate deployments
        start = self.ctd_events.deps[1].ntp_start
        stop = self.ctd_events.deps[2].ntp_start + 864000
        ctd_data.time.values = np.linspace(start + 1, stop - 1,
                                           num=ctd_data.time.shape[0])
        nut_data.time.values = np.linspace(start + 1, stop - 1,
                                           num=nut_data.time.shape[0])

        ctd_sd = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_sd.events = self.ctd_events
        ctd_sd._insert_dataset(ctd_data)
        ctd_sd.calculate_all()

        nut_sd = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
        nut_sd.events = self.nut_events
        nut_sd._insert_dataset(nut_data)
        nut_sd.calculate_all()

        # second pass computes products that needed interpolated CTD values
        nut_sd.interpolate_needed({self.ctdpf_sk: ctd_sd})
        nut_sd.calculate_all()

        self.assert_parameters_in_datasets(
            nut_sd.datasets,
            ['ctdpf_sbe43_sample-seawater_temperature',
             'ctdpf_sbe43_sample-practical_salinity',
             'salinity_corrected_nitrate'])
Пример #44
0
    def test_calculate_external_12035(self):
        """METBK products requiring VELPT inputs are computed (regression for 12035)."""
        velpt_data = xr.open_dataset(os.path.join(DATA_DIR, self.velpt_fn),
                                     decode_times=False)
        metbk_data = xr.open_dataset(os.path.join(DATA_DIR, self.metbk_fn),
                                     decode_times=False)

        velpt_data = velpt_data[[
            'obs', 'time', 'deployment', 'velocity_beam1', 'velocity_beam2',
            'velocity_beam3', 'amplitude_beam1', 'amplitude_beam2',
            'amplitude_beam3'
        ]]
        metbk_data = metbk_data[[
            'obs', 'time', 'deployment', 'barometric_pressure',
            'relative_humidity', 'air_temperature', 'longwave_irradiance',
            'precipitation', 'sea_surface_temperature',
            'sea_surface_conductivity', 'shortwave_irradiance',
            'eastward_wind_velocity', 'northward_wind_velocity'
        ]]

        velpt_sd = StreamDataset(self.velpt_sk, {}, [], 'UNIT')
        velpt_sd.events = self.velpt_events
        velpt_sd._insert_dataset(velpt_data)
        velpt_sd.calculate_all()

        metbk_sd = StreamDataset(self.metbk_sk, {}, [self.velpt_sk], 'UNIT')
        metbk_sd.events = self.metbk_events
        metbk_sd._insert_dataset(metbk_data)
        metbk_sd.calculate_all()
        # recompute after pulling in interpolated VELPT parameters
        metbk_sd.interpolate_needed({self.velpt_sk: velpt_sd})
        metbk_sd.calculate_all()

        expected = [
            'met_barpres',
            'met_windavg_mag_corr_east',
            'met_windavg_mag_corr_north',
            'met_current_direction',
            'met_current_speed',
            'met_relwind_direction',
            'met_relwind_speed',
            'met_netsirr',
            'met_salsurf',
            'met_spechum',
            'met_heatflx_minute',
            'met_latnflx_minute',
            'met_netlirr_minute',
            'met_sensflx_minute',
        ]
        self.assert_parameters_in_datasets(metbk_sd.datasets, expected)
    def test_calculate_external_multiple_deployments(self):
        """NUTNR external products span two deployments after CTD interpolation."""
        ctd_data = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        nut_data = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)

        ctd_data = ctd_data[['obs', 'time', 'deployment', 'temperature', 'pressure',
                             'pressure_temp', 'conductivity', 'ext_volt0']]
        nut_data = nut_data[['obs', 'time', 'deployment', 'spectral_channels',
                             'frame_type', 'nutnr_dark_value_used_for_fit']]

        # remap times to make this two separate deployments
        start = self.ctd_events.deps[1].ntp_start
        stop = self.ctd_events.deps[2].ntp_start + 864000
        for data in (ctd_data, nut_data):
            data.time.values = np.linspace(start + 1, stop - 1, num=data.time.shape[0])

        ctd_sd = StreamDataset(self.ctdpf_sk, {}, [], 'UNIT')
        ctd_sd.events = self.ctd_events
        ctd_sd._insert_dataset(ctd_data)
        ctd_sd.calculate_all()

        nut_sd = StreamDataset(self.nutnr_sk, {}, [self.ctdpf_sk], 'UNIT')
        nut_sd.events = self.nut_events
        nut_sd._insert_dataset(nut_data)
        nut_sd.calculate_all()

        # external products become computable once CTD values are interpolated in
        nut_sd.interpolate_needed({self.ctdpf_sk: ctd_sd})
        nut_sd.calculate_all()

        expected = ['ctdpf_sbe43_sample-seawater_temperature',
                    'ctdpf_sbe43_sample-practical_salinity',
                    'salinity_corrected_nitrate']
        self.assert_parameters_in_datasets(nut_sd.datasets, expected)
Пример #46
0
    def fetch_raw_data(self):
        """
        Fetch the source data for this request.

        Kicks off asynchronous Asset Management event lookups for every
        stream in the request, resolves them, then fetches raw instrument
        data for each non-virtual stream (virtual streams get an empty
        StreamDataset). Annotations are inserted and flagged / non-deployed
        data masked out, after which the primary (or, for a virtual primary,
        its source) stream is verified to still contain data.

        :return: None; populates ``self.datasets`` keyed by stream key
        :raises MissingDataException: if the primary stream — or a required
            source stream — returns no data, either at fetch time or after
            the deployment/annotation masks are applied
        """
        # Start fetching calibration data from Asset Management
        am_events = {}
        am_futures = {}
        for stream_key in self.stream_parameters:
            # reference designator, e.g. 'CE04OSPS-SF01B-4A-NUTNRA102'
            refdes = '-'.join((stream_key.subsite, stream_key.node, stream_key.sensor))
            am_futures[stream_key] = self.asset_management.get_events_async(refdes)

        # Resolve calibration data futures and attach to instrument data
        for stream_key in am_futures:
            events = am_futures[stream_key].result()
            am_events[stream_key] = events

        # Start fetching instrument data
        for stream_key, stream_parameters in self.stream_parameters.iteritems():
            # every stream except the current one may supply external values
            other_streams = set(self.stream_parameters)
            other_streams.remove(stream_key)
            # only supporting (non-primary) streams are fetched padded —
            # presumably to cover interpolation at the time-range edges; confirm
            # against StreamDataset.fetch_raw_data
            should_pad = stream_key != self.stream_key
            if not stream_key.is_virtual:
                log.debug('<%s> Fetching raw data for %s', self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams, self.request_id)
                sd.events = am_events[stream_key]
                try:
                    sd.fetch_raw_data(self.time_range, self.limit, should_pad)
                    self.datasets[stream_key] = sd
                except MissingDataException as e:
                    # missing data is fatal only for the primary stream or a
                    # stream the primary is derived from; otherwise log and go on
                    if stream_key == self.stream_key:
                        raise MissingDataException("Query returned no results for primary stream")
                    elif stream_key.stream in self.stream_key.stream.source_streams:
                        raise MissingDataException("Query returned no results for source stream")
                    else:
                        log.error('<%s> %s', self.request_id, e.message)

            else:
                # virtual streams have no stored raw data; they are computed
                # later from their source streams
                log.debug('<%s> Creating empty dataset for virtual stream: %s',
                          self.request_id, stream_key.as_refdes())
                sd = StreamDataset(stream_key, self.uflags, other_streams, self.request_id)
                sd.events = am_events[stream_key]
                self.datasets[stream_key] = sd

        # Fetch annotations
        self._insert_annotations()
        self._exclude_flagged_data()
        self._exclude_nondeployed_data()

        # Verify data still exists after masking virtual
        message = 'Query returned no results for %s stream (due to deployment or annotation mask)'
        if self.stream_key.is_virtual:
            # a virtual primary is satisfied if any of its source streams
            # still has data after masking
            found_streams = [stream.stream for stream in self.datasets
                             if self.datasets[stream]]
            if not any(stream in self.stream_key.stream.source_streams for stream in found_streams):
                raise MissingDataException(message % 'source')
        # real
        else:
            primary_stream_dataset = self.datasets[self.stream_key]
            if not primary_stream_dataset.datasets:
                raise MissingDataException(message % 'primary')

        # Remove any empty, non-virtual supporting datasets
        for stream_key in list(self.datasets):
            if not stream_key.is_virtual:
                if not self.datasets[stream_key].datasets:
                    del self.datasets[stream_key]
Пример #47
0
    def test_calculate_external_single_deployment(self):
        """
        Verify that a NUTNR stream can derive externally-dependent products
        (temp_sal_corrected_nitrate) by interpolating CTD data from a
        companion stream within a single deployment.
        """
        time_range = TimeRange(3.65342400e+09, 3.65351040e+09)

        # Build one calibration record per coefficient spanning the full range.
        cal_records = {}
        for name, value in self.ctd_nutnr_cals.iteritems():
            cal_records[name] = [{'start': time_range.start - 1,
                                  'stop': time_range.stop + 1,
                                  'value': value,
                                  'deployment': 1}]
        cal_store = CalibrationCoefficientStore(cal_records, 'UNIT')

        # Trim each source dataset down to the variables the test needs.
        ctd_variables = ['obs', 'time', 'deployment', 'temperature', 'pressure',
                         'pressure_temp', 'conductivity', 'ext_volt0']
        nut_variables = ['obs', 'time', 'deployment', 'spectral_channels',
                         'frame_type', 'nutnr_dark_value_used_for_fit']

        ctd_raw = xr.open_dataset(os.path.join(DATA_DIR, self.ctdpf_fn), decode_times=False)
        nut_raw = xr.open_dataset(os.path.join(DATA_DIR, self.nutnr_fn), decode_times=False)
        ctd_raw = ctd_raw[ctd_variables]
        nut_raw = nut_raw[nut_variables]

        # CTD: internal calculations only — it supplies the external inputs.
        ctd_stream = StreamDataset(self.ctdpf_sk, cal_store, {}, [], 'UNIT')
        ctd_stream._insert_dataset(ctd_raw)
        ctd_stream.calculate_internal()

        # NUTNR: internal first, then interpolate CTD values and run external.
        nut_stream = StreamDataset(self.nutnr_sk, cal_store, {}, [self.ctdpf_sk], 'UNIT')
        nut_stream._insert_dataset(nut_raw)
        nut_stream.calculate_internal()

        nut_stream.interpolate_needed({self.ctdpf_sk: ctd_stream})
        nut_stream.calculate_external()

        expected = ['ctdpf_sbe43_sample-seawater_temperature',
                    'ctdpf_sbe43_sample-practical_salinity',
                    'temp_sal_corrected_nitrate']
        self.assert_parameters_in_datasets(nut_stream.datasets, expected)