def test_qc(self):
        """Verify QC-executed/results parameters are produced for the derived nitrate product."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context managers close the JSON fixtures promptly; the previous bare
        # open() calls leaked file handles (ResourceWarning under pytest).
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)
        with open(os.path.join(DATA_DIR, 'qc.json')) as f:
            qc = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # One calibration record per coefficient spanning the whole time range.
        coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
        sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, qc_parameters=qc, request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
        # Trim each source dataset to just the parameters the request needs.
        ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()

        expected_parameters = ['temp_sal_corrected_nitrate_qc_executed',
                               'temp_sal_corrected_nitrate_qc_results']
        self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets, expected_parameters)
    def test_add_externals(self):
        """Verify CTD seawater pressure is imported into the NUTNR dataset as an external."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
        sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
        # Trim each source dataset to just the parameters the request needs.
        ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # Pressure is copied into the requesting (nutnr) dataset only, not back into the source.
        self.assertIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[nutnr_sk].datasets[0])
        self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[ctdpf_sk].datasets[0])

        data = json.loads(JsonResponse(sr).json())
        for each in data:
            self.assertIn('int_ctd_pressure', each)
# Beispiel #3 (0)
    def test_cspp_nutnr_uses_ctd_pressure(self):
        """CSPP NUTNR output should carry int_ctd_pressure sourced from the co-located CTD."""
        nutnr_file = 'nutnr_j_cspp_instrument_recovered.nc'
        ctd_file = 'ctdpf_j_cspp_instrument_recovered.nc'

        nutnr_data = xr.open_dataset(os.path.join(DATA_DIR, nutnr_file),
                                     decode_times=False)
        ctd_data = xr.open_dataset(os.path.join(DATA_DIR, ctd_file),
                                   decode_times=False)

        # Span the full extent of the NUTNR source data.
        time_range = TimeRange(nutnr_data.time.values[0],
                               nutnr_data.time.values[-1])
        sr = StreamRequest(self.nut_sk2, [], time_range, {}, request_id='UNIT')

        # Reduce the NUTNR data to the parameters the request requires.
        wanted = self.base_params + [p.name
                                     for p in sr.stream_parameters[self.nut_sk2]]
        nutnr_data = nutnr_data[wanted]

        sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags,
                                                  [self.nut_sk2],
                                                  sr.request_id)
        sr.datasets[self.nut_sk2] = StreamDataset(self.nut_sk2, sr.uflags,
                                                  [self.ctd_sk2],
                                                  sr.request_id)

        sr.datasets[self.ctd_sk2]._insert_dataset(ctd_data)
        sr.datasets[self.nut_sk2]._insert_dataset(nutnr_data)

        sr.calculate_derived_products()
        sr.import_extra_externals()
        sr.rename_parameters()

        self.assertIn('int_ctd_pressure',
                      sr.datasets[self.nut_sk2].datasets[1])

        # Every output record must include the interpolated CTD pressure.
        for record in json.loads(JsonResponse(sr).json()):
            self.assertIn('int_ctd_pressure', record)
    def test_botpt_24hr(self):
        """Verify the 24-hour BOTPT derived products are computed from the nano-sample stream."""
        botpt_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample')
        botpt_24hr_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample_24hr')
        botpt_fn = 'deployment0001_RS03ECAL-MJ03E-06-BOTPTA302-streamed-botpt_nano_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3674160000.0, 3674181600.1)
        coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}] for k in
                        cals}
        sr = StreamRequest(botpt_24hr_sk, [], coefficients, tr, {}, request_id='UNIT')
        botpt_ds = xr.open_dataset(os.path.join(DATA_DIR, botpt_fn), decode_times=False)

        botpt_ds = botpt_ds[self.base_params + [p.name for p in sr.stream_parameters[botpt_sk]]]

        sr.datasets[botpt_sk] = StreamDataset(botpt_sk, sr.coefficients, sr.uflags, [], sr.request_id)
        sr.datasets[botpt_24hr_sk] = StreamDataset(botpt_24hr_sk, sr.coefficients, sr.uflags, [botpt_sk], sr.request_id)
        # Only the source stream gets data; the 24hr stream is fully derived.
        sr.datasets[botpt_sk]._insert_dataset(botpt_ds)

        sr.calculate_derived_products()

        result = sr.datasets[botpt_24hr_sk].datasets[1]
        self.assertIn('botsflu_time24h', result)  # TODO - input is currently defined as TIME15S, should be TIME?
        self.assertIn('botsflu_daydepth', result)
        self.assertIn('botsflu_4wkrate', result)
        self.assertIn('botsflu_8wkrate', result)
    def test_csv(self):
        """Verify a populated stream request renders to a non-empty CSV document."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        coefficients = {k: [{'start': tr.start-1, 'stop': tr.stop+1, 'value': cals[k], 'deployment': 1}] for k in cals}
        sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)

        # Trim each source dataset to just the parameters the request needs.
        nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        csv = CsvGenerator(sr, ',').to_csv()
        self.assertTrue(csv)
# Beispiel #6 (0)
    def test_calculate(self):
        """Cross-check derived CTD products against direct ion-function computations."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # One calibration record per coefficient spanning the whole time range.
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)

        # Trim each source dataset to just the parameters the request needs.
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()

        # The pipeline's seawater_temperature must match a direct call to the
        # tempwat ion function with the same calibration coefficients.
        ds = sr.datasets[ctdpf_sk]
        tempwat = ctd_sbe16plus_tempwat(ds.datasets[0].temperature,
                                        cals['CC_a0'], cals['CC_a1'],
                                        cals['CC_a2'], cals['CC_a3'])
        np.testing.assert_array_equal(ds.datasets[0].seawater_temperature,
                                      tempwat)

        # Likewise practical_salinity against a direct pracsal computation.
        pracsal = ctd_pracsal(ds.datasets[0].seawater_conductivity,
                              ds.datasets[0].seawater_temperature,
                              ds.datasets[0].seawater_pressure)
        np.testing.assert_array_equal(ds.datasets[0].practical_salinity,
                                      pracsal)

        response = json.loads(JsonResponse(sr).json())
        self.assertEqual(len(response), len(nutnr_ds.time.values))
# Beispiel #7 (0)
    def test_add_externals(self):
        """Verify CTD seawater pressure is imported into the NUTNR dataset as an external."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)
        # Trim each source dataset to just the parameters the request needs.
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # Pressure is copied into the requesting (nutnr) dataset only.
        self.assertIn('ctdpf_sbe43_sample-seawater_pressure',
                      sr.datasets[nutnr_sk].datasets[0])
        self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure',
                         sr.datasets[ctdpf_sk].datasets[0])

        data = json.loads(JsonResponse(sr).json())
        for each in data:
            self.assertIn('int_ctd_pressure', each)
# Beispiel #8 (0)
    def test_qc(self):
        """Verify QC-executed/results parameters are produced for the derived nitrate product."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context managers close the JSON fixtures promptly; the previous bare
        # open() calls leaked file handles (ResourceWarning under pytest).
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)
        with open(os.path.join(DATA_DIR, 'qc.json')) as f:
            qc = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           qc_parameters=qc,
                           request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)
        # Trim each source dataset to just the parameters the request needs.
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()

        expected_parameters = [
            'temp_sal_corrected_nitrate_qc_executed',
            'temp_sal_corrected_nitrate_qc_results'
        ]
        self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets,
                                           expected_parameters)
# Beispiel #9 (0)
    def test_add_externals_glider(self):
        """Glider PAR output should receive int_ctd_pressure and GPS lat/lon externally."""
        gps_sk = StreamKey('CE05MOAS', 'GL319', '00-ENG000000', 'recovered_host', 'glider_gps_position')
        par_sk = StreamKey('CE05MOAS', 'GL319', '01-PARADM000', 'recovered_host', 'parad_m_glider_recovered')
        ctd_sk = StreamKey('CE05MOAS', 'GL319', '05-CTDGVM000', 'recovered_host', 'ctdgv_m_glider_instrument_recovered')

        filenames = {
            gps_sk: 'deployment0003_CE05MOAS-GL319-00-ENG000000-recovered_host-glider_gps_position.nc',
            par_sk: 'deployment0003_CE05MOAS-GL319-01-PARADM000-recovered_host-parad_m_glider_recovered.nc',
            ctd_sk: 'deployment0003_CE05MOAS-GL319-05-CTDGVM000-recovered_host-ctdgv_m_glider_instrument_recovered.nc',
        }
        # Fetch all three source datasets
        sources = {sk: xr.open_dataset(os.path.join(DATA_DIR, fn), decode_times=False)
                   for sk, fn in filenames.items()}

        # Create the stream request spanning the PAR data
        par_times = sources[par_sk].time.values
        tr = TimeRange(par_times[0], par_times[-1])
        sr = StreamRequest(par_sk, [], tr, {}, request_id='UNIT')

        # Each stream lists the other two as its externals
        externals = {gps_sk: [par_sk, ctd_sk],
                     par_sk: [gps_sk, ctd_sk],
                     ctd_sk: [par_sk, gps_sk]}

        # Build each StreamDataset, attach deployment events, and insert the
        # source data filtered to what the stream request says we need
        for sk in (gps_sk, par_sk, ctd_sk):
            needed = self.base_params + [p.name for p in sr.stream_parameters[sk]]
            sr.datasets[sk] = StreamDataset(sk, sr.uflags, externals[sk], sr.request_id)
            sr.datasets[sk].events = self.get_events(sk)
            sr.datasets[sk]._insert_dataset(sources[sk][needed])

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # Ticket 9328: int_ctd_pressure is now set in stream_request.import_extra_externals()
        self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[par_sk].datasets[3])
        self.assertIn('int_ctd_pressure', sr.datasets[par_sk].datasets[3])
        self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[ctd_sk].datasets[3])

        for record in json.loads(JsonResponse(sr).json()):
            self.assertIn('int_ctd_pressure', record)
            self.assertIn('lat', record)
            self.assertIn('lon', record)
    def test_function_map_scalar(self):
        """All scalar function-map (L1) parameters should be produced for echo_sounding."""
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)

        sr = StreamRequest(echo_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients, sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)
        sr.calculate_derived_products()
        sr._add_location()

        # Every expected L1 product must be present in the deployment-0 dataset.
        expected = {'hpies_travel_time1_L1', 'hpies_travel_time2_L1', 'hpies_travel_time3_L1', 'hpies_travel_time4_L1',
                    'hpies_bliley_temperature_L1', 'hpies_pressure_L1'}
        produced = sr.datasets[echo_sk].datasets[0]
        self.assertSetEqual(expected.difference(produced), set())
    def test_add_externals_glider(self):
        """Glider PAR output should receive CTD pressure and GPS lat/lon as externals."""
        gps_sk = StreamKey('CE05MOAS', 'GL319', '00-ENG000000', 'recovered_host', 'glider_gps_position')
        par_sk = StreamKey('CE05MOAS', 'GL319', '01-PARADM000', 'recovered_host', 'parad_m_glider_recovered')
        ctd_sk = StreamKey('CE05MOAS', 'GL319', '05-CTDGVM000', 'recovered_host', 'ctdgv_m_glider_instrument_recovered')

        # Fetch the source data
        prefix = 'deployment0003_CE05MOAS-GL319-'
        gps_ds = xr.open_dataset(
            os.path.join(DATA_DIR, prefix + '00-ENG000000-recovered_host-glider_gps_position.nc'),
            decode_times=False)
        par_ds = xr.open_dataset(
            os.path.join(DATA_DIR, prefix + '01-PARADM000-recovered_host-parad_m_glider_recovered.nc'),
            decode_times=False)
        ctd_ds = xr.open_dataset(
            os.path.join(DATA_DIR, prefix + '05-CTDGVM000-recovered_host-ctdgv_m_glider_instrument_recovered.nc'),
            decode_times=False)

        # Create the stream request spanning the PAR data
        tr = TimeRange(par_ds.time.values[0], par_ds.time.values[-1])
        sr = StreamRequest(par_sk, [], tr, {}, request_id='UNIT')

        # Filter each source dataset to just the parameters the request requires
        gps_ds = gps_ds[self.base_params + [p.name for p in sr.stream_parameters[gps_sk]]]
        par_ds = par_ds[self.base_params + [p.name for p in sr.stream_parameters[par_sk]]]
        ctd_ds = ctd_ds[self.base_params + [p.name for p in sr.stream_parameters[ctd_sk]]]

        # Create the StreamDataset objects, each listing the other streams as externals
        sr.datasets[gps_sk] = StreamDataset(gps_sk, sr.uflags, [par_sk, ctd_sk], sr.request_id)
        sr.datasets[par_sk] = StreamDataset(par_sk, sr.uflags, [gps_sk, ctd_sk], sr.request_id)
        sr.datasets[ctd_sk] = StreamDataset(ctd_sk, sr.uflags, [par_sk, gps_sk], sr.request_id)

        # Attach deployment events, then insert the source data
        for sk, ds in ((gps_sk, gps_ds), (par_sk, par_ds), (ctd_sk, ctd_ds)):
            sr.datasets[sk].events = self.get_events(sk)
        for sk, ds in ((gps_sk, gps_ds), (par_sk, par_ds), (ctd_sk, ctd_ds)):
            sr.datasets[sk]._insert_dataset(ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()

        # CTD pressure is copied into the PAR dataset but not back into the CTD's own
        self.assertIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[par_sk].datasets[3])
        self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar', sr.datasets[ctd_sk].datasets[3])

        for record in json.loads(JsonResponse(sr).json()):
            self.assertIn('int_ctd_pressure', record)
            self.assertIn('lat', record)
            self.assertIn('lon', record)
# Beispiel #12 (0)
    def test_csv(self):
        """Verify a populated stream request renders to a non-empty CSV document."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed',
                             'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed',
                             'ctdpf_sbe43_sample')
        nutnr_fn = 'nutnr_a_sample.nc'
        ctdpf_fn = 'ctdpf_sbe43_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(nutnr_sk, [2443],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn),
                                   decode_times=False)

        # Trim each source dataset to just the parameters the request needs.
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients,
                                              sr.uflags, [nutnr_sk],
                                              sr.request_id)
        sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients,
                                              sr.uflags, [ctdpf_sk],
                                              sr.request_id)
        sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
        sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)

        sr.calculate_derived_products()
        csv = CsvGenerator(sr, ',').to_csv()
        self.assertTrue(csv)
# Beispiel #13 (0)
    def test_botpt_15s(self):
        """Verify the 15-second BOTPT derived products are computed from the nano-sample stream."""
        botpt_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed',
                             'botpt_nano_sample')
        botpt_15s_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302',
                                 'streamed', 'botpt_nano_sample_15s')
        botpt_fn = 'deployment0001_RS03ECAL-MJ03E-06-BOTPTA302-streamed-botpt_nano_sample.nc'

        # Context manager closes the JSON fixture; bare open() leaked the handle.
        with open(os.path.join(DATA_DIR, 'cals.json')) as f:
            cals = json.load(f)

        tr = TimeRange(3674160000.0, 3674181600.1)
        coefficients = {
            k: [{
                'start': tr.start - 1,
                'stop': tr.stop + 1,
                'value': cals[k],
                'deployment': 1
            }]
            for k in cals
        }
        sr = StreamRequest(botpt_15s_sk, [],
                           coefficients,
                           tr, {},
                           request_id='UNIT')
        botpt_ds = xr.open_dataset(os.path.join(DATA_DIR, botpt_fn),
                                   decode_times=False)

        botpt_ds = botpt_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[botpt_sk]]]

        sr.datasets[botpt_sk] = StreamDataset(botpt_sk, sr.coefficients,
                                              sr.uflags, [], sr.request_id)
        sr.datasets[botpt_15s_sk] = StreamDataset(botpt_15s_sk,
                                                  sr.coefficients, sr.uflags,
                                                  [botpt_sk], sr.request_id)
        # Only the source stream gets data; the 15s stream is fully derived.
        sr.datasets[botpt_sk]._insert_dataset(botpt_ds)

        sr.calculate_derived_products()

        result = sr.datasets[botpt_15s_sk].datasets[1]
        self.assertIn('botsflu_time15s', result)
        self.assertIn('botsflu_meanpres', result)
        self.assertIn('botsflu_meandepth', result)
        self.assertIn('botsflu_5minrate', result)
        self.assertIn('botsflu_10minrate', result)
        self.assertIn('botsflu_predtide', result)
# Beispiel #14 (0)
    def test_function_map_scalar(self):
        """All scalar function-map (L1) parameters should be produced for echo_sounding."""
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed',
                            'echo_sounding')
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'),
                                  decode_times=False)

        sr = StreamRequest(echo_sk, [], {}, TimeRange(0, 99999999), {},
                           request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients,
                                             sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)
        sr.calculate_derived_products()
        sr._add_location()

        # Every expected L1 product must appear in the deployment-0 dataset.
        expected = {
            'hpies_travel_time1_L1', 'hpies_travel_time2_L1',
            'hpies_travel_time3_L1', 'hpies_travel_time4_L1',
            'hpies_bliley_temperature_L1', 'hpies_pressure_L1'
        }
        produced = sr.datasets[echo_sk].datasets[0]
        self.assertSetEqual(expected.difference(produced), set())
    def test_metbk_hourly(self):
        """The hourly METBK stream should contain exactly its declared parameters plus the standard fields."""
        # Inline calibration values for the METBK bulk-flux computation.
        cals = {
            'CC_lat': 40.13678333,
            'CC_lon': -70.76978333,
            'CC_depth_of_conductivity_and_temperature_measurements_m': 1.0668,
            'CC_height_of_air_humidity_measurement_m': 4.2926,
            'CC_height_of_air_temperature_measurement_m': 4.2926,
            'CC_height_of_windspeed_sensor_above_sealevel_m': 4.7498,
            'CC_jcool': 1,
            'CC_jwarm': 1,
            'CC_zinvpbl': 600,
        }

        metbk_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'metbk_a_dcl_instrument_recovered.nc'),
            decode_times=False)
        vel_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'velpt_ab_dcl_instrument_recovered.nc'),
            decode_times=False)

        hourly_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_hourly')
        source_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_a_dcl_instrument_recovered')
        vel_sk = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host', 'velpt_ab_dcl_instrument_recovered')

        tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])
        # Pad each cal record by 1000s on both sides so it spans the data.
        coefficients = {key: [{'start': tr.start-1000, 'stop': tr.stop+1000, 'value': value, 'deployment': 3}]
                        for key, value in cals.items()}
        sr = StreamRequest(hourly_sk, [], coefficients, tr, {}, request_id='UNIT')

        metbk_ds = metbk_ds[self.base_params + [p.name for p in sr.stream_parameters[source_sk]]]
        vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[vel_sk]]]

        # NOTE(review): vel_sk lists itself (not source_sk) among its externals — preserved as-is; confirm intended.
        sr.datasets[source_sk] = StreamDataset(source_sk, sr.coefficients, sr.uflags, [hourly_sk, vel_sk], sr.request_id)
        sr.datasets[hourly_sk] = StreamDataset(hourly_sk, sr.coefficients, sr.uflags, [source_sk, vel_sk], sr.request_id)
        sr.datasets[vel_sk] = StreamDataset(vel_sk, sr.coefficients, sr.uflags, [hourly_sk, vel_sk], sr.request_id)

        sr.datasets[source_sk]._insert_dataset(metbk_ds)
        sr.datasets[vel_sk]._insert_dataset(vel_ds)

        sr.calculate_derived_products()

        expected_params = [p.name for p in hourly_sk.stream.parameters] + ['obs', 'time', 'deployment', 'lat', 'lon']
        self.assertListEqual(sorted(expected_params), sorted(sr.datasets[hourly_sk].datasets[3]))
# Beispiel #16 (0)
    def test_add_location(self):
        """lat/lon should be filled per deployment from the location_information map."""
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'),
                                  decode_times=False)
        # Force two deployments so per-deployment location lookup is exercised.
        echo_ds.deployment.values[:20] = 1
        echo_ds.deployment.values[20:] = 2
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed',
                            'echo_sounding')
        location_info = {
            echo_sk.as_three_part_refdes(): [{
                'deployment': 1,
                'lat': 1,
                'lon': 5
            }, {
                'deployment': 2,
                'lat': 2,
                'lon': 6
            }]
        }
        sr = StreamRequest(echo_sk, [], {},
                           TimeRange(0, 99999999), {},
                           location_information=location_info,
                           request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients,
                                             sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)

        sr.calculate_derived_products()
        sr._add_location()

        result = sr.datasets[echo_sk]
        # Each deployment's dataset must hold exactly its configured lat/lon.
        for deployment, expected_lat, expected_lon in [(1, 1.0, 5.0),
                                                       (2, 2.0, 6.0)]:
            dep_ds = result.datasets[deployment]
            self.assertSetEqual(set(np.unique(dep_ds.lat.values)),
                                {expected_lat})
            self.assertSetEqual(set(np.unique(dep_ds.lon.values)),
                                {expected_lon})
    def test_add_location(self):
        """lat/lon should be filled per deployment from the location_information map."""
        echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
        echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)
        # Force two deployments so per-deployment location lookup is exercised.
        echo_ds.deployment.values[:20] = 1
        echo_ds.deployment.values[20:] = 2

        location_info = {echo_sk.as_three_part_refdes(): [{'deployment': 1, 'lat': 1, 'lon': 5},
                                                          {'deployment': 2, 'lat': 2, 'lon': 6}]}
        sr = StreamRequest(echo_sk, [], {}, TimeRange(0, 99999999), {},
                           location_information=location_info, request_id='UNIT')
        sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients, sr.uflags, [], sr.request_id)
        sr.datasets[echo_sk]._insert_dataset(echo_ds)

        sr.calculate_derived_products()
        sr._add_location()

        result = sr.datasets[echo_sk]
        # Each deployment's dataset must hold exactly its configured lat/lon.
        for deployment, expected_lat, expected_lon in [(1, 1.0, 5.0), (2, 2.0, 6.0)]:
            dep_ds = result.datasets[deployment]
            self.assertSetEqual(set(np.unique(dep_ds.lat.values)), {expected_lat})
            self.assertSetEqual(set(np.unique(dep_ds.lon.values)), {expected_lon})
    # Beispiel #18 (stray scrape artifact; commented out to keep the class body valid)
    def test_pressure_depth_renamed(self):
        """After rename_parameters, 'pressure_depth' must be exposed as 'pressure'."""
        dosta_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'dosta_abcdjm_cspp_instrument_recovered.nc'),
            decode_times=False)
        ctd_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'ctdpf_j_cspp_instrument_recovered.nc'),
            decode_times=False)

        tr = TimeRange(dosta_ds.time.values[0], dosta_ds.time.values[-1])
        # NOTE(review): unlike the sibling tests in this file, no coefficients
        # dict is passed to StreamRequest or StreamDataset here -- presumably a
        # newer constructor signature; confirm against the current API.
        sr = StreamRequest(self.dosta_sk, [], tr, {}, request_id='UNIT')

        wanted = self.base_params + [p.name
                                     for p in sr.stream_parameters[self.dosta_sk]]
        dosta_ds = dosta_ds[wanted]

        sr.datasets[self.dosta_sk] = StreamDataset(self.dosta_sk, sr.uflags,
                                                   [self.ctd_sk2],
                                                   sr.request_id)
        sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags,
                                                  [self.dosta_sk],
                                                  sr.request_id)
        sr.datasets[self.dosta_sk]._insert_dataset(dosta_ds)
        sr.datasets[self.ctd_sk2]._insert_dataset(ctd_ds)

        sr.calculate_derived_products()
        sr.import_extra_externals()
        sr.rename_parameters()

        # Renaming must hold both in the xarray dataset ...
        dosta_data = sr.datasets[self.dosta_sk].datasets[1]
        self.assertNotIn('pressure_depth', dosta_data)
        self.assertIn('pressure', dosta_data)

        # ... and in every record of the JSON response.
        for record in json.loads(JsonResponse(sr).json()):
            self.assertNotIn('pressure_depth', record)
            self.assertIn('pressure', record)
    def test_calculate(self):
        """Derived products for paired NUTNR/CTDPF streams match direct computation."""
        nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102',
                             'streamed', 'nutnr_a_sample')
        ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107',
                             'streamed', 'ctdpf_sbe43_sample')

        with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
            cals = json.load(fh)

        tr = TimeRange(3.65342400e+09, 3.65351040e+09)
        # Pad each cal's validity window by one second on both ends so it
        # fully covers the requested time range.
        coefficients = {}
        for name, value in cals.items():
            coefficients[name] = [{'start': tr.start - 1, 'stop': tr.stop + 1,
                                   'value': value, 'deployment': 1}]
        sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {},
                           request_id='UNIT')

        nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, 'nutnr_a_sample.nc'),
                                   decode_times=False)
        ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, 'ctdpf_sbe43_sample.nc'),
                                   decode_times=False)
        nutnr_ds = nutnr_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[nutnr_sk]]]
        ctdpf_ds = ctdpf_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[ctdpf_sk]]]

        # Each stream lists the other as its external source.
        for primary, secondary, ds in ((ctdpf_sk, nutnr_sk, ctdpf_ds),
                                       (nutnr_sk, ctdpf_sk, nutnr_ds)):
            sr.datasets[primary] = StreamDataset(primary, sr.coefficients,
                                                 sr.uflags, [secondary],
                                                 sr.request_id)
            sr.datasets[primary]._insert_dataset(ds)

        sr.calculate_derived_products()

        ctd_data = sr.datasets[ctdpf_sk].datasets[0]
        # Recompute TEMPWAT independently and compare against the derived product.
        expected_tempwat = ctd_sbe16plus_tempwat(ctd_data.temperature,
                                                 cals['CC_a0'], cals['CC_a1'],
                                                 cals['CC_a2'], cals['CC_a3'])
        np.testing.assert_array_equal(ctd_data.seawater_temperature,
                                      expected_tempwat)

        # Same check for practical salinity.
        expected_pracsal = ctd_pracsal(ctd_data.seawater_conductivity,
                                       ctd_data.seawater_temperature,
                                       ctd_data.seawater_pressure)
        np.testing.assert_array_equal(ctd_data.practical_salinity,
                                      expected_pracsal)

        response = json.loads(JsonResponse(sr).json())
        self.assertEqual(len(response), len(nutnr_ds.time.values))
    # Beispiel #20 (stray scrape artifact; commented out to keep the class body valid)
    def test_metbk_hourly(self):
        """The metbk_hourly derived stream should produce its full parameter set."""
        cals = {
            'CC_lat': 40.13678333,
            'CC_lon': -70.76978333,
            'CC_depth_of_conductivity_and_temperature_measurements_m': 1.0668,
            'CC_height_of_air_humidity_measurement_m': 4.2926,
            'CC_height_of_air_temperature_measurement_m': 4.2926,
            'CC_height_of_windspeed_sensor_above_sealevel_m': 4.7498,
            'CC_jcool': 1,
            'CC_jwarm': 1,
            'CC_zinvpbl': 600,
        }

        metbk_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'metbk_a_dcl_instrument_recovered.nc'),
            decode_times=False)
        vel_ds = xr.open_dataset(
            os.path.join(DATA_DIR, 'velpt_ab_dcl_instrument_recovered.nc'),
            decode_times=False)

        hourly_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000',
                              'recovered_host', 'metbk_hourly')
        source_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000',
                              'recovered_host',
                              'metbk_a_dcl_instrument_recovered')
        vel_sk = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000',
                           'recovered_host',
                           'velpt_ab_dcl_instrument_recovered')

        tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])
        # Pad each cal's validity window so it fully covers the time range.
        coefficients = {}
        for name, value in cals.items():
            coefficients[name] = [{'start': tr.start - 1000,
                                   'stop': tr.stop + 1000,
                                   'value': value,
                                   'deployment': 3}]
        sr = StreamRequest(hourly_sk, [], coefficients, tr, {},
                           request_id='UNIT')

        metbk_ds = metbk_ds[self.base_params +
                            [p.name for p in sr.stream_parameters[source_sk]]]
        vel_ds = vel_ds[self.base_params +
                        [p.name for p in sr.stream_parameters[vel_sk]]]

        # NOTE(review): vel_sk lists itself among its own externals
        # ([hourly_sk, vel_sk]); by analogy with the other entries,
        # [hourly_sk, source_sk] may have been intended -- confirm.
        externals = {
            source_sk: [hourly_sk, vel_sk],
            hourly_sk: [source_sk, vel_sk],
            vel_sk: [hourly_sk, vel_sk],
        }
        for sk, ext in externals.items():
            sr.datasets[sk] = StreamDataset(sk, sr.coefficients, sr.uflags,
                                            ext, sr.request_id)

        sr.datasets[source_sk]._insert_dataset(metbk_ds)
        sr.datasets[vel_sk]._insert_dataset(vel_ds)

        sr.calculate_derived_products()

        expected = sorted([p.name for p in hourly_sk.stream.parameters] +
                          ['obs', 'time', 'deployment', 'lat', 'lon'])
        self.assertListEqual(expected,
                             sorted(sr.datasets[hourly_sk].datasets[3]))