def test_qc(self):
    """Verify QC executed/results parameters are produced for the NUTNR nitrate product.

    Loads NUTNR and co-located CTD source data, runs the derived-product
    calculation with QC parameters supplied, and asserts the QC output
    variables appear in the NUTNR datasets.
    """
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # use context managers so the JSON fixture files are closed promptly
    # (json.load(open(...)) leaks the file handle)
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    with open(os.path.join(DATA_DIR, 'qc.json')) as fh:
        qc = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    # wrap each calibration value in a record spanning the full time range
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, qc_parameters=qc, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    # restrict the fixtures to the parameters the stream request needs
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    expected_parameters = ['temp_sal_corrected_nitrate_qc_executed',
                           'temp_sal_corrected_nitrate_qc_results']
    self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets, expected_parameters)
def test_csv(self):
    """Verify a computed stream request can be rendered to non-empty CSV."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    csv = CsvGenerator(sr, ',').to_csv()
    self.assertTrue(csv)
def test_botpt_24hr(self):
    """Verify the virtual botpt_nano_sample_24hr stream derives its BOTSFLU products."""
    botpt_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample')
    botpt_24hr_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample_24hr')
    botpt_fn = 'deployment0001_RS03ECAL-MJ03E-06-BOTPTA302-streamed-botpt_nano_sample.nc'
    # close the calibration fixture file deterministically
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3674160000.0, 3674181600.1)
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(botpt_24hr_sk, [], coefficients, tr, {}, request_id='UNIT')
    botpt_ds = xr.open_dataset(os.path.join(DATA_DIR, botpt_fn), decode_times=False)
    botpt_ds = botpt_ds[self.base_params + [p.name for p in sr.stream_parameters[botpt_sk]]]
    # source stream carries the data; the 24hr stream is virtual and derived from it
    sr.datasets[botpt_sk] = StreamDataset(botpt_sk, sr.coefficients, sr.uflags, [], sr.request_id)
    sr.datasets[botpt_24hr_sk] = StreamDataset(botpt_24hr_sk, sr.coefficients, sr.uflags, [botpt_sk],
                                               sr.request_id)
    sr.datasets[botpt_sk]._insert_dataset(botpt_ds)
    sr.calculate_derived_products()
    result = sr.datasets[botpt_24hr_sk].datasets[1]
    # TODO - input is currently defined as TIME15S, should be TIME?
    self.assertIn('botsflu_time24h', result)
    self.assertIn('botsflu_daydepth', result)
    self.assertIn('botsflu_4wkrate', result)
    self.assertIn('botsflu_8wkrate', result)
def test_cspp_nutnr_uses_ctd_pressure(self):
    """CSPP NUTNR output should include int_ctd_pressure sourced from the co-located CTD."""
    nut_ds = xr.open_dataset(os.path.join(DATA_DIR, 'nutnr_j_cspp_instrument_recovered.nc'),
                             decode_times=False)
    ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, 'ctdpf_j_cspp_instrument_recovered.nc'),
                             decode_times=False)
    time_range = TimeRange(nut_ds.time.values[0], nut_ds.time.values[-1])
    sr = StreamRequest(self.nut_sk2, [], time_range, {}, request_id='UNIT')
    # trim the NUTNR fixture to just the requested parameters
    wanted = self.base_params + [p.name for p in sr.stream_parameters[self.nut_sk2]]
    nut_ds = nut_ds[wanted]
    sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags, [self.nut_sk2], sr.request_id)
    sr.datasets[self.nut_sk2] = StreamDataset(self.nut_sk2, sr.uflags, [self.ctd_sk2], sr.request_id)
    sr.datasets[self.ctd_sk2]._insert_dataset(ctd_ds)
    sr.datasets[self.nut_sk2]._insert_dataset(nut_ds)
    sr.calculate_derived_products()
    sr.import_extra_externals()
    sr.rename_parameters()
    self.assertIn('int_ctd_pressure', sr.datasets[self.nut_sk2].datasets[1])
    # the pressure must also surface in every JSON record
    data = json.loads(JsonResponse(sr).json())
    for record in data:
        self.assertIn('int_ctd_pressure', record)
def create_nut_sr(self):
    """Build and return a NUTNR StreamRequest with CTD support data inserted.

    Shared fixture helper: loads the NUTNR and CTD netCDF files, attaches
    deployment events, and inserts both datasets without computing products.
    """
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the QC fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'qc.json')) as fh:
        qc = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    sr = StreamRequest(self.nut_sk, [18], tr, {}, qc_parameters=qc, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[self.nut_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[self.ctd_sk]]]
    sr.datasets[self.ctd_sk] = StreamDataset(self.ctd_sk, sr.uflags, [self.nut_sk], sr.request_id)
    sr.datasets[self.nut_sk] = StreamDataset(self.nut_sk, sr.uflags, [self.ctd_sk], sr.request_id)
    sr.datasets[self.ctd_sk].events = self.ctd_events
    sr.datasets[self.nut_sk].events = self.nut_events
    sr.datasets[self.ctd_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[self.nut_sk]._insert_dataset(nutnr_ds)
    return sr
def create_metbk_hourly_sr(self):
    """Build and return a metbk_hourly StreamRequest with METBK and VELPT data inserted."""
    met_ds = xr.open_dataset(os.path.join(DATA_DIR, 'metbk_a_dcl_instrument_recovered.nc'),
                             decode_times=False)
    vel_ds = xr.open_dataset(os.path.join(DATA_DIR, 'velpt_ab_dcl_instrument_recovered.nc'),
                             decode_times=False)
    # both of these datasets are labeled deployment 3 but the times are squarely in deployment 1. Fix.
    met_ds.deployment.values[:] = 1
    vel_ds.deployment.values[:] = 1
    time_range = TimeRange(met_ds.time.values[0], met_ds.time.values[-1])
    sr = StreamRequest(self.hourly_sk, [], time_range, {}, request_id='UNIT')
    # keep only the parameters the request resolved for each source stream
    met_ds = met_ds[self.base_params + [p.name for p in sr.stream_parameters[self.met_sk]]]
    vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[self.vel_sk]]]
    sr.datasets[self.met_sk] = StreamDataset(self.met_sk, sr.uflags,
                                             [self.hourly_sk, self.vel_sk], sr.request_id)
    sr.datasets[self.hourly_sk] = StreamDataset(self.hourly_sk, sr.uflags,
                                                [self.met_sk, self.vel_sk], sr.request_id)
    sr.datasets[self.vel_sk] = StreamDataset(self.vel_sk, sr.uflags,
                                             [self.hourly_sk, self.met_sk], sr.request_id)
    sr.datasets[self.hourly_sk].events = self.met_events
    sr.datasets[self.met_sk].events = self.met_events
    sr.datasets[self.vel_sk].events = self.vel_events
    sr.datasets[self.met_sk]._insert_dataset(met_ds)
    sr.datasets[self.vel_sk]._insert_dataset(vel_ds)
    return sr
def test_virtual(self):
    """A virtual metbk_hourly request resolves its METBK source and VELPT support streams."""
    hourly = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_hourly')
    source = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host',
                       'metbk_a_dcl_instrument_recovered')
    velpt = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host',
                      'velpt_ab_dcl_instrument_recovered')
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(hourly, [], {}, time_range, {}, request_id='UNIT')
    self.assertEqual(set(request.stream_parameters), {hourly, source, velpt})
def test_need_internal(self):
    """A parameter computable from its own stream should resolve to that single stream."""
    ctd = StreamKey('RS03AXBS', 'LJ03A', '12-CTDPFB301', 'streamed', 'ctdpf_optode_sample')
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(ctd, [911], {}, time_range, {}, request_id='UNIT')
    # if internal only, no external stream should exist in stream_parameters
    self.assertEqual(set(request.stream_parameters), {ctd})
def test_metbk_hourly_needs(self):
    """metbk_hourly should need its METBK source plus the co-located VELPT, fully fulfilled."""
    hourly_sk = StreamKey('CP01CNSM', 'SBD11', '06-METBKA000', 'telemetered', 'metbk_hourly')
    met_sk = StreamKey('CP01CNSM', 'SBD11', '06-METBKA000', 'telemetered', 'metbk_a_dcl_instrument')
    vel_sk = StreamKey('CP01CNSM', 'RID26', '04-VELPTA000', 'telemetered', 'velpt_ab_dcl_instrument')
    request = StreamRequest(hourly_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
    self.assertEqual(set(request.stream_parameters), {hourly_sk, met_sk, vel_sk})
    # nothing should remain unresolved
    self.assertEqual(set(request.unfulfilled), set([]))
def test_add_externals(self):
    """Verify import_extra_externals copies CTD pressure into the NUTNR datasets only."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    sr.import_extra_externals()
    # imported pressure lands on the requesting stream, not the source stream
    self.assertIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[nutnr_sk].datasets[0])
    self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[ctdpf_sk].datasets[0])
    data = json.loads(JsonResponse(sr).json())
    for each in data:
        self.assertIn('int_ctd_pressure', each)
def test_add_externals_glider(self):
    """Glider PARAD output should carry int_ctd_pressure plus GPS lat/lon externals."""
    gps_sk = StreamKey('CE05MOAS', 'GL319', '00-ENG000000', 'recovered_host', 'glider_gps_position')
    par_sk = StreamKey('CE05MOAS', 'GL319', '01-PARADM000', 'recovered_host', 'parad_m_glider_recovered')
    ctd_sk = StreamKey('CE05MOAS', 'GL319', '05-CTDGVM000', 'recovered_host',
                       'ctdgv_m_glider_instrument_recovered')
    filenames = {
        gps_sk: 'deployment0003_CE05MOAS-GL319-00-ENG000000-recovered_host-glider_gps_position.nc',
        par_sk: 'deployment0003_CE05MOAS-GL319-01-PARADM000-recovered_host-parad_m_glider_recovered.nc',
        ctd_sk: 'deployment0003_CE05MOAS-GL319-05-CTDGVM000-recovered_host-'
                'ctdgv_m_glider_instrument_recovered.nc',
    }
    # Fetch the source data
    sources = {sk: xr.open_dataset(os.path.join(DATA_DIR, fn), decode_times=False)
               for sk, fn in filenames.items()}
    # Create the stream request over the PARAD time span
    par_times = sources[par_sk].time.values
    sr = StreamRequest(par_sk, [], TimeRange(par_times[0], par_times[-1]), {}, request_id='UNIT')
    # Filter the source data to just the data the stream request says we need
    for sk in sources:
        sources[sk] = sources[sk][self.base_params + [p.name for p in sr.stream_parameters[sk]]]
    # Create the StreamDataset objects, each aware of the other two streams
    sr.datasets[gps_sk] = StreamDataset(gps_sk, sr.uflags, [par_sk, ctd_sk], sr.request_id)
    sr.datasets[par_sk] = StreamDataset(par_sk, sr.uflags, [gps_sk, ctd_sk], sr.request_id)
    sr.datasets[ctd_sk] = StreamDataset(ctd_sk, sr.uflags, [par_sk, gps_sk], sr.request_id)
    for sk in (gps_sk, par_sk, ctd_sk):
        sr.datasets[sk].events = self.get_events(sk)
    # Insert the source data
    for sk in (gps_sk, par_sk, ctd_sk):
        sr.datasets[sk]._insert_dataset(sources[sk])
    sr.calculate_derived_products()
    sr.import_extra_externals()
    # Ticket 9328: int_ctd_pressure is now set in stream_request.import_extra_externals()
    self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar',
                     sr.datasets[par_sk].datasets[3])
    self.assertIn('int_ctd_pressure', sr.datasets[par_sk].datasets[3])
    self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar',
                     sr.datasets[ctd_sk].datasets[3])
    data = json.loads(JsonResponse(sr).json())
    for record in data:
        self.assertIn('int_ctd_pressure', record)
        self.assertIn('lat', record)
        self.assertIn('lon', record)
def test_wfp_include_preswat(self):
    """A WFP PARAD request should pull PRESWAT from the co-located CTD stream."""
    par_sk = StreamKey('CP02PMUO', 'WFP01', '05-PARADK000', 'recovered_wfp',
                       'parad_k__stc_imodem_instrument_recovered')
    ctd_sk = StreamKey('CP02PMUO', 'WFP01', '03-CTDPFK000', 'recovered_wfp',
                       'ctdpf_ckl_wfp_instrument_recovered')
    time_range = TimeRange(3594211324.0, 3653837045.0)
    request = StreamRequest(par_sk, [], {}, time_range, {}, request_id='UNIT')
    # we expect to fetch the PRESWAT from the co-located CTD
    self.assertEqual(set(request.stream_parameters), {par_sk, ctd_sk})
def test_basic_stream_request(self):
    """Requesting a single parameter id yields exactly that parameter."""
    param_id = 1527
    glider_sk = StreamKey('CP05MOAS', 'GL388', '03-CTDGVM000', 'recovered_host',
                          'ctdgv_m_glider_instrument_recovered')
    request = StreamRequest(glider_sk, [param_id], {}, TimeRange(3.622409e+09, 3.627058e+09),
                            {}, request_id='UNIT')
    self.assertEqual(len(request.requested_parameters), 1)
    # the resolved parameter carries the PD-prefixed id
    self.assertEqual(request.requested_parameters[0].asdict()['pd_id'], 'PD%d' % param_id)
def test_need_dpi(self):
    """OPTAA's dpi_PRACSAL_L2 need should resolve against the co-located SBE43 CTD."""
    # OPTAA specifies that it needs dpi_PRACSAL_L2
    # first, an OPTAA with a colocated SBE43
    optaa = StreamKey('RS03AXPS', 'SF03A', '3B-OPTAAD301', 'streamed', 'optaa_sample')
    ctd = StreamKey('RS03AXPS', 'SF03A', '2A-CTDPFA302', 'streamed', 'ctdpf_sbe43_sample')
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(optaa, [], {}, time_range, {}, request_id='UNIT')
    self.assertEqual(set(request.stream_parameters), {optaa, ctd})
def test_glider_include_preswat_gps(self):
    """Glider DOSTA should resolve PRESWAT from the CTD stream and LAT/LON from GPS."""
    do_sk = StreamKey('CP05MOAS', 'GL388', '04-DOSTAM000', 'recovered_host',
                      'dosta_abcdjm_glider_recovered')
    ctd_sk = StreamKey('CP05MOAS', 'GL388', '03-CTDGVM000', 'recovered_host',
                       'ctdgv_m_glider_instrument_recovered')
    gps_sk = StreamKey('CP05MOAS', 'GL388', '00-ENG000000', 'recovered_host', 'glider_gps_position')
    request = StreamRequest(do_sk, [], {}, TimeRange(3.622409e+09, 3.627058e+09), {},
                            request_id='UNIT')
    # we expect to fetch the PRESWAT from the ctd glider stream and LAT/LON from the gps position stream
    self.assertEqual(set(request.stream_parameters), {do_sk, ctd_sk, gps_sk})
def test_add_externals(self):
    """Verify import_extra_externals copies CTD pressure into the NUTNR datasets only."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {
        k: [{
            'start': tr.start - 1,
            'stop': tr.stop + 1,
            'value': cals[k],
            'deployment': 1
        }]
        for k in cals
    }
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    sr.import_extra_externals()
    # imported pressure lands on the requesting stream, not the source stream
    self.assertIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[nutnr_sk].datasets[0])
    self.assertNotIn('ctdpf_sbe43_sample-seawater_pressure', sr.datasets[ctdpf_sk].datasets[0])
    data = json.loads(JsonResponse(sr).json())
    for each in data:
        self.assertIn('int_ctd_pressure', each)
def test_external_virtual(self):
    """
    make sure that external virtual stream parameters can be resolved (c.f. PR # 9196)
    """
    # only Endurance (CE**) platforms have a co-located VELPT; a GI01SUMO key
    # (e.g. 'GI01SUMO', 'SBD12', '04-PCO2AA000') would not resolve here
    pco2_sk = StreamKey('CE07SHSM', 'SBD12', '04-PCO2AA000', 'telemetered',
                        'pco2a_a_dcl_instrument_air')
    request = StreamRequest(pco2_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
    self.assertEqual(set(request.unfulfilled), set([]))
def test_need_external(self):
    """NUTNR's external needs (PD908/PD911) should resolve to the co-located CTD stream."""
    # nutnr_a_sample requests PD908 and PD911
    nutnr = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctd = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(nutnr, [], {}, time_range, {}, request_id='UNIT')
    self.assertEqual(set(request.stream_parameters), {nutnr, ctd})
def test_calculate(self):
    """Verify derived CTD products match direct ion-function results and JSON length matches input."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {
        k: [{
            'start': tr.start - 1,
            'stop': tr.stop + 1,
            'value': cals[k],
            'deployment': 1
        }]
        for k in cals
    }
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    ds = sr.datasets[ctdpf_sk]
    # derived temperature must equal a direct call to the ion function
    tempwat = ctd_sbe16plus_tempwat(ds.datasets[0].temperature,
                                    cals['CC_a0'], cals['CC_a1'],
                                    cals['CC_a2'], cals['CC_a3'])
    np.testing.assert_array_equal(ds.datasets[0].seawater_temperature, tempwat)
    # same for practical salinity
    pracsal = ctd_pracsal(ds.datasets[0].seawater_conductivity,
                          ds.datasets[0].seawater_temperature,
                          ds.datasets[0].seawater_pressure)
    np.testing.assert_array_equal(ds.datasets[0].practical_salinity, pracsal)
    response = json.loads(JsonResponse(sr).json())
    self.assertEqual(len(response), len(nutnr_ds.time.values))
def test_add_externals_glider(self):
    """Glider PARAD output should include the namespaced CTD pressure plus GPS lat/lon."""
    gps_sk = StreamKey('CE05MOAS', 'GL319', '00-ENG000000', 'recovered_host', 'glider_gps_position')
    par_sk = StreamKey('CE05MOAS', 'GL319', '01-PARADM000', 'recovered_host', 'parad_m_glider_recovered')
    ctd_sk = StreamKey('CE05MOAS', 'GL319', '05-CTDGVM000', 'recovered_host',
                       'ctdgv_m_glider_instrument_recovered')
    filenames = {
        gps_sk: 'deployment0003_CE05MOAS-GL319-00-ENG000000-recovered_host-glider_gps_position.nc',
        par_sk: 'deployment0003_CE05MOAS-GL319-01-PARADM000-recovered_host-parad_m_glider_recovered.nc',
        ctd_sk: 'deployment0003_CE05MOAS-GL319-05-CTDGVM000-recovered_host-'
                'ctdgv_m_glider_instrument_recovered.nc',
    }
    # Fetch the source data
    sources = {sk: xr.open_dataset(os.path.join(DATA_DIR, fn), decode_times=False)
               for sk, fn in filenames.items()}
    # Create the stream request over the PARAD time span
    par_times = sources[par_sk].time.values
    sr = StreamRequest(par_sk, [], TimeRange(par_times[0], par_times[-1]), {}, request_id='UNIT')
    # Filter the source data to just the data the stream request says we need
    for sk in sources:
        sources[sk] = sources[sk][self.base_params + [p.name for p in sr.stream_parameters[sk]]]
    # Create the StreamDataset objects, each aware of the other two streams
    sr.datasets[gps_sk] = StreamDataset(gps_sk, sr.uflags, [par_sk, ctd_sk], sr.request_id)
    sr.datasets[par_sk] = StreamDataset(par_sk, sr.uflags, [gps_sk, ctd_sk], sr.request_id)
    sr.datasets[ctd_sk] = StreamDataset(ctd_sk, sr.uflags, [par_sk, gps_sk], sr.request_id)
    for sk in (gps_sk, par_sk, ctd_sk):
        sr.datasets[sk].events = self.get_events(sk)
    # Insert the source data
    for sk in (gps_sk, par_sk, ctd_sk):
        sr.datasets[sk]._insert_dataset(sources[sk])
    sr.calculate_derived_products()
    sr.import_extra_externals()
    # the imported pressure appears on the requesting stream only
    self.assertIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar',
                  sr.datasets[par_sk].datasets[3])
    self.assertNotIn('ctdgv_m_glider_instrument_recovered-sci_water_pressure_dbar',
                     sr.datasets[ctd_sk].datasets[3])
    data = json.loads(JsonResponse(sr).json())
    for record in data:
        self.assertIn('int_ctd_pressure', record)
        self.assertIn('lat', record)
        self.assertIn('lon', record)
def test_calculate(self):
    """Verify derived CTD products match direct ion-function results and JSON length matches input."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1, 'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    ds = sr.datasets[ctdpf_sk]
    # derived temperature must equal a direct call to the ion function
    tempwat = ctd_sbe16plus_tempwat(ds.datasets[0].temperature,
                                    cals['CC_a0'], cals['CC_a1'],
                                    cals['CC_a2'], cals['CC_a3'])
    np.testing.assert_array_equal(ds.datasets[0].seawater_temperature, tempwat)
    # same for practical salinity
    pracsal = ctd_pracsal(ds.datasets[0].seawater_conductivity,
                          ds.datasets[0].seawater_temperature,
                          ds.datasets[0].seawater_pressure)
    np.testing.assert_array_equal(ds.datasets[0].practical_salinity, pracsal)
    response = json.loads(JsonResponse(sr).json())
    self.assertEqual(len(response), len(nutnr_ds.time.values))
def test_glider_include_preswat_gps(self):
    """Glider DOSTA should resolve CTD PRESWAT, GPS LAT/LON, and the ADCP stream."""
    do_sk = StreamKey('CP05MOAS', 'GL388', '04-DOSTAM000', 'recovered_host',
                      'dosta_abcdjm_glider_recovered')
    ctd_sk = StreamKey('CP05MOAS', 'GL388', '03-CTDGVM000', 'recovered_host',
                       'ctdgv_m_glider_instrument_recovered')
    gps_sk = StreamKey('CP05MOAS', 'GL388', '00-ENG000000', 'recovered_host', 'glider_gps_position')
    adc_sk = StreamKey('CP05MOAS', 'GL388', '01-ADCPAM000', 'recovered_host', 'adcp_velocity_glider')
    request = StreamRequest(do_sk, [], {}, TimeRange(3.622409e+09, 3.627058e+09), {},
                            request_id='UNIT')
    self.assertEqual(set(request.stream_parameters), {do_sk, ctd_sk, gps_sk, adc_sk})
def create_echo_sounding_sr(self, parameters=None):
    """Build a StreamRequest for the HPIES echo_sounding stream spanning two deployments.

    :param parameters: optional list of parameter ids (defaults to all)
    :return: the populated StreamRequest
    """
    if parameters is None:
        parameters = []
    echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)
    # somehow the times in this dataset are corrupted. Remap to valid times spanning both deployments
    start = self.hpies_events.deps[1].ntp_start
    end = self.hpies_events.deps[2].ntp_start + 864000
    echo_ds.time.values = np.linspace(start + 1, end - 1, num=echo_ds.time.shape[0])
    sr = StreamRequest(self.echo_sk, parameters, TimeRange(start, end), {}, request_id='UNIT')
    sr.datasets[self.echo_sk] = StreamDataset(self.echo_sk, sr.uflags, [], sr.request_id)
    sr.datasets[self.echo_sk].events = self.hpies_events
    sr.datasets[self.echo_sk]._insert_dataset(echo_ds)
    return sr
def test_virtual(self):
    """Virtual metbk_hourly resolves its METBK source and one of two VELPT streams."""
    hourly = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_hourly')
    source = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host',
                       'metbk_a_dcl_instrument_recovered')
    # either velpt_ab_dcl_instrument_recovered or velpt_ab_dcl_diagnostics_recovered depending on order in preload
    velpt_diag = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host',
                           'velpt_ab_dcl_diagnostics_recovered')
    velpt_inst = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host',
                           'velpt_ab_dcl_instrument_recovered')
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(hourly, [], {}, time_range, {}, request_id='UNIT')
    resolved = set(request.stream_parameters)
    velpt = velpt_diag if velpt_diag in resolved else velpt_inst
    self.assertEqual(resolved, {hourly, source, velpt})
def test_qc(self):
    """Verify QC executed/results parameters are produced for the NUTNR nitrate product."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context managers close the JSON fixture files (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    with open(os.path.join(DATA_DIR, 'qc.json')) as fh:
        qc = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {
        k: [{
            'start': tr.start - 1,
            'stop': tr.stop + 1,
            'value': cals[k],
            'deployment': 1
        }]
        for k in cals
    }
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, qc_parameters=qc, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    expected_parameters = [
        'temp_sal_corrected_nitrate_qc_executed',
        'temp_sal_corrected_nitrate_qc_results'
    ]
    self.assert_parameters_in_datasets(sr.datasets[nutnr_sk].datasets, expected_parameters)
def test_metbk_hourly_needs(self):
    """metbk_hourly needs its METBK source plus one of two VELPT streams, fully fulfilled."""
    hourly_sk = StreamKey('CP01CNSM', 'SBD11', '06-METBKA000', 'telemetered', 'metbk_hourly')
    met_sk = StreamKey('CP01CNSM', 'SBD11', '06-METBKA000', 'telemetered', 'metbk_a_dcl_instrument')
    # depending on the order in preload, either velpt_ab_dcl_diagnostics or velpt_ab_dcl_instrument can be used
    vel_diag_sk = StreamKey('CP01CNSM', 'RID26', '04-VELPTA000', 'telemetered',
                            'velpt_ab_dcl_diagnostics')
    vel_inst_sk = StreamKey('CP01CNSM', 'RID26', '04-VELPTA000', 'telemetered',
                            'velpt_ab_dcl_instrument')
    request = StreamRequest(hourly_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
    resolved = set(request.stream_parameters)
    vel_sk = vel_diag_sk if vel_diag_sk in resolved else vel_inst_sk
    self.assertEqual(resolved, {hourly_sk, met_sk, vel_sk})
    # nothing should remain unresolved
    self.assertEqual(set(request.unfulfilled), set([]))
def test_function_map_scalar(self):
    """All HPIES L1 products should be derivable from the echo_sounding stream."""
    echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
    echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)
    sr = StreamRequest(echo_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
    sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients, sr.uflags, [], sr.request_id)
    sr.datasets[echo_sk]._insert_dataset(echo_ds)
    sr.calculate_derived_products()
    sr._add_location()
    expected = {'hpies_travel_time1_L1', 'hpies_travel_time2_L1', 'hpies_travel_time3_L1',
                'hpies_travel_time4_L1', 'hpies_bliley_temperature_L1', 'hpies_pressure_L1'}
    # every expected product must be present in the first dataset
    missing = expected.difference(sr.datasets[echo_sk].datasets[0])
    self.assertSetEqual(missing, set())
def test_pressure_depth_renamed(self):
    """rename_parameters should expose 'pressure' instead of 'pressure_depth' for CSPP DOSTA."""
    dosta_ds = xr.open_dataset(os.path.join(DATA_DIR, 'dosta_abcdjm_cspp_instrument_recovered.nc'),
                               decode_times=False)
    ctd_ds = xr.open_dataset(os.path.join(DATA_DIR, 'ctdpf_j_cspp_instrument_recovered.nc'),
                             decode_times=False)
    time_range = TimeRange(dosta_ds.time.values[0], dosta_ds.time.values[-1])
    sr = StreamRequest(self.dosta_sk, [], time_range, {}, request_id='UNIT')
    wanted = self.base_params + [p.name for p in sr.stream_parameters[self.dosta_sk]]
    dosta_ds = dosta_ds[wanted]
    sr.datasets[self.dosta_sk] = StreamDataset(self.dosta_sk, sr.uflags, [self.ctd_sk2], sr.request_id)
    sr.datasets[self.ctd_sk2] = StreamDataset(self.ctd_sk2, sr.uflags, [self.dosta_sk], sr.request_id)
    sr.datasets[self.dosta_sk]._insert_dataset(dosta_ds)
    sr.datasets[self.ctd_sk2]._insert_dataset(ctd_ds)
    sr.calculate_derived_products()
    sr.import_extra_externals()
    sr.rename_parameters()
    # dataset-level check: renamed in place
    self.assertNotIn('pressure_depth', sr.datasets[self.dosta_sk].datasets[1])
    self.assertIn('pressure', sr.datasets[self.dosta_sk].datasets[1])
    # response-level check: every JSON record uses the new name
    data = json.loads(JsonResponse(sr).json())
    for record in data:
        self.assertNotIn('pressure_depth', record)
        self.assertIn('pressure', record)
def test_csv(self):
    """Verify a computed stream request can be rendered to non-empty CSV."""
    nutnr_sk = StreamKey('CE04OSPS', 'SF01B', '4A-NUTNRA102', 'streamed', 'nutnr_a_sample')
    ctdpf_sk = StreamKey('CE04OSPS', 'SF01B', '2A-CTDPFA107', 'streamed', 'ctdpf_sbe43_sample')
    nutnr_fn = 'nutnr_a_sample.nc'
    ctdpf_fn = 'ctdpf_sbe43_sample.nc'
    # context manager closes the calibration fixture file (was json.load(open(...)))
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)
    tr = TimeRange(3.65342400e+09, 3.65351040e+09)
    coefficients = {
        k: [{
            'start': tr.start - 1,
            'stop': tr.stop + 1,
            'value': cals[k],
            'deployment': 1
        }]
        for k in cals
    }
    sr = StreamRequest(nutnr_sk, [2443], coefficients, tr, {}, request_id='UNIT')
    nutnr_ds = xr.open_dataset(os.path.join(DATA_DIR, nutnr_fn), decode_times=False)
    ctdpf_ds = xr.open_dataset(os.path.join(DATA_DIR, ctdpf_fn), decode_times=False)
    nutnr_ds = nutnr_ds[self.base_params + [p.name for p in sr.stream_parameters[nutnr_sk]]]
    ctdpf_ds = ctdpf_ds[self.base_params + [p.name for p in sr.stream_parameters[ctdpf_sk]]]
    sr.datasets[ctdpf_sk] = StreamDataset(ctdpf_sk, sr.coefficients, sr.uflags, [nutnr_sk], sr.request_id)
    sr.datasets[nutnr_sk] = StreamDataset(nutnr_sk, sr.coefficients, sr.uflags, [ctdpf_sk], sr.request_id)
    sr.datasets[ctdpf_sk]._insert_dataset(ctdpf_ds)
    sr.datasets[nutnr_sk]._insert_dataset(nutnr_ds)
    sr.calculate_derived_products()
    csv = CsvGenerator(sr, ',').to_csv()
    self.assertTrue(csv)
def test_function_map_scalar(self):
    """All HPIES L1 products should be derivable from the echo_sounding stream."""
    echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
    echo_ds = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)
    sr = StreamRequest(echo_sk, [], {}, TimeRange(0, 99999999), {}, request_id='UNIT')
    sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients, sr.uflags, [], sr.request_id)
    sr.datasets[echo_sk]._insert_dataset(echo_ds)
    sr.calculate_derived_products()
    sr._add_location()
    expected = {
        'hpies_travel_time1_L1',
        'hpies_travel_time2_L1',
        'hpies_travel_time3_L1',
        'hpies_travel_time4_L1',
        'hpies_bliley_temperature_L1',
        'hpies_pressure_L1',
    }
    # every expected product must be present in the first dataset
    missing = expected.difference(sr.datasets[echo_sk].datasets[0])
    self.assertSetEqual(missing, set())
def test_botpt_15s(self):
    """The 15-second BOTPT stream should yield every botsflu_* derived product.

    Feeds the nano-rate BOTPT fixture into a request for the 15s stream and
    checks each expected product name appears in the derived dataset.
    """
    botpt_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample')
    botpt_15s_sk = StreamKey('RS03ECAL', 'MJ03E', '06-BOTPTA302', 'streamed', 'botpt_nano_sample_15s')
    botpt_fn = 'deployment0001_RS03ECAL-MJ03E-06-BOTPTA302-streamed-botpt_nano_sample.nc'

    # Fix: close the cal file deterministically instead of leaking the handle
    # via json.load(open(...)).
    with open(os.path.join(DATA_DIR, 'cals.json')) as fh:
        cals = json.load(fh)

    tr = TimeRange(3674160000.0, 3674181600.1)
    # Pad each cal record by 1s on both ends so it fully covers the time range.
    coefficients = {k: [{'start': tr.start - 1, 'stop': tr.stop + 1,
                         'value': cals[k], 'deployment': 1}]
                    for k in cals}
    sr = StreamRequest(botpt_15s_sk, [], coefficients, tr, {}, request_id='UNIT')

    # Renamed from 'botps_ds' (typo) for clarity.
    botpt_ds = xr.open_dataset(os.path.join(DATA_DIR, botpt_fn), decode_times=False)
    botpt_ds = botpt_ds[self.base_params + [p.name for p in sr.stream_parameters[botpt_sk]]]

    sr.datasets[botpt_sk] = StreamDataset(botpt_sk, sr.coefficients, sr.uflags, [], sr.request_id)
    sr.datasets[botpt_15s_sk] = StreamDataset(botpt_15s_sk, sr.coefficients, sr.uflags,
                                              [botpt_sk], sr.request_id)
    sr.datasets[botpt_sk]._insert_dataset(botpt_ds)

    sr.calculate_derived_products()

    # datasets[1] — presumably keyed by deployment number 1; TODO confirm.
    result = sr.datasets[botpt_15s_sk].datasets[1]
    for param in ('botsflu_time15s', 'botsflu_meanpres', 'botsflu_meandepth',
                  'botsflu_5minrate', 'botsflu_10minrate', 'botsflu_predtide'):
        self.assertIn(param, result)
def test_add_location(self):
    """Verify _add_location substitutes per-deployment lat/lon values.

    NOTE(review): an identical ``test_add_location`` is defined again later in
    this class and shadows this copy, so this definition never executes; one of
    the two should be removed or renamed.
    """
    sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
    dataset = xr.open_dataset(os.path.join(DATA_DIR, 'echo_sounding.nc'), decode_times=False)
    # First 20 records belong to deployment 1, the remainder to deployment 2.
    dataset.deployment.values[:20] = 1
    dataset.deployment.values[20:] = 2

    locations = {
        sk.as_three_part_refdes(): [
            {'deployment': 1, 'lat': 1, 'lon': 5},
            {'deployment': 2, 'lat': 2, 'lon': 6},
        ]
    }
    request = StreamRequest(sk, [], {}, TimeRange(0, 99999999), {},
                            location_information=locations, request_id='UNIT')
    request.datasets[sk] = StreamDataset(sk, request.coefficients, request.uflags, [], request.request_id)
    request.datasets[sk]._insert_dataset(dataset)
    request.calculate_derived_products()
    request._add_location()

    stream_ds = request.datasets[sk]
    # Each deployment's dataset must carry exactly its configured coordinates.
    for deployment, expected_lat, expected_lon in ((1, 1.0, 5.0), (2, 2.0, 6.0)):
        dep_ds = stream_ds.datasets[deployment]
        self.assertSetEqual(set(np.unique(dep_ds.lat.values)), {expected_lat})
        self.assertSetEqual(set(np.unique(dep_ds.lon.values)), {expected_lon})
def test_metbk_hourly(self):
    """Derive the metbk_hourly stream from METBK + VELPT source data.

    NOTE(review): an identical ``test_metbk_hourly`` is defined again later in
    this class and shadows this copy, so this definition never executes; one of
    the two should be removed or renamed.
    """
    cals = {
        'CC_lat': 40.13678333,
        'CC_lon': -70.76978333,
        'CC_depth_of_conductivity_and_temperature_measurements_m': 1.0668,
        'CC_height_of_air_humidity_measurement_m': 4.2926,
        'CC_height_of_air_temperature_measurement_m': 4.2926,
        'CC_height_of_windspeed_sensor_above_sealevel_m': 4.7498,
        'CC_jcool': 1,
        'CC_jwarm': 1,
        'CC_zinvpbl': 600,
    }
    metbk_ds = xr.open_dataset(os.path.join(DATA_DIR, 'metbk_a_dcl_instrument_recovered.nc'),
                               decode_times=False)
    vel_ds = xr.open_dataset(os.path.join(DATA_DIR, 'velpt_ab_dcl_instrument_recovered.nc'),
                             decode_times=False)

    hourly_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_hourly')
    source_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host',
                          'metbk_a_dcl_instrument_recovered')
    vel_sk = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host',
                       'velpt_ab_dcl_instrument_recovered')

    tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])
    # One cal record per coefficient, padded by 1000s so it spans the range.
    coefficients = {}
    for name, value in cals.items():
        coefficients[name] = [{'start': tr.start - 1000, 'stop': tr.stop + 1000,
                               'value': value, 'deployment': 3}]
    sr = StreamRequest(hourly_sk, [], coefficients, tr, {}, request_id='UNIT')

    metbk_ds = metbk_ds[self.base_params + [p.name for p in sr.stream_parameters[source_sk]]]
    vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[vel_sk]]]

    sr.datasets[source_sk] = StreamDataset(source_sk, sr.coefficients, sr.uflags,
                                           [hourly_sk, vel_sk], sr.request_id)
    sr.datasets[hourly_sk] = StreamDataset(hourly_sk, sr.coefficients, sr.uflags,
                                           [source_sk, vel_sk], sr.request_id)
    sr.datasets[vel_sk] = StreamDataset(vel_sk, sr.coefficients, sr.uflags,
                                        [hourly_sk, vel_sk], sr.request_id)
    sr.datasets[source_sk]._insert_dataset(metbk_ds)
    sr.datasets[vel_sk]._insert_dataset(vel_ds)

    sr.calculate_derived_products()

    # The hourly dataset should expose every stream parameter plus bookkeeping vars.
    expected = sorted([p.name for p in hourly_sk.stream.parameters] +
                      ['obs', 'time', 'deployment', 'lat', 'lon'])
    self.assertListEqual(expected, sorted(sr.datasets[hourly_sk].datasets[3]))
def test_add_location(self):
    # Verify that _add_location substitutes per-deployment lat/lon values.
    # NOTE(review): this re-defines test_add_location; an identical definition
    # appears earlier in the class, which this one shadows, so the earlier copy
    # never runs. One of the two should be removed or renamed.
    echo_fn = 'echo_sounding.nc'
    echo_ds = xr.open_dataset(os.path.join(DATA_DIR, echo_fn), decode_times=False)
    # Split the records across two deployments so per-deployment substitution
    # can be observed.
    echo_ds.deployment.values[:20] = 1
    echo_ds.deployment.values[20:] = 2
    echo_sk = StreamKey('RS01SLBS', 'LJ01A', '05-HPIESA101', 'streamed', 'echo_sounding')
    location_info = {echo_sk.as_three_part_refdes(): [{'deployment': 1, 'lat': 1, 'lon': 5},
                                                      {'deployment': 2, 'lat': 2, 'lon': 6}]}
    tr = TimeRange(0, 99999999)
    sr = StreamRequest(echo_sk, [], {}, tr, {}, location_information=location_info, request_id='UNIT')
    sr.datasets[echo_sk] = StreamDataset(echo_sk, sr.coefficients, sr.uflags, [], sr.request_id)
    sr.datasets[echo_sk]._insert_dataset(echo_ds)
    sr.calculate_derived_products()
    sr._add_location()
    ds = sr.datasets[echo_sk]
    # Each deployment's dataset should carry exactly the configured lat/lon.
    for deployment, lat, lon in [(1, 1.0, 5.0), (2, 2.0, 6.0)]:
        lats = set(np.unique(ds.datasets[deployment].lat.values))
        lons = set(np.unique(ds.datasets[deployment].lon.values))
        self.assertSetEqual(lats, {lat})
        self.assertSetEqual(lons, {lon})
def test_need_external(self):
    """Resolving nutnr_a_sample must pull in its external CTD dependency.

    nutnr_a_sample requests PD908 and PD911, which are sourced from the CTD
    stream, so both stream keys should appear in the resolved parameters.
    """
    time_range = TimeRange(3617736678.149051, 3661524609.0570827)
    request = StreamRequest(self.nut_sk, [], {}, time_range, {}, request_id='UNIT')
    self.assertEqual({self.ctd_sk, self.nut_sk}, set(request.stream_parameters))
def test_metbk_hourly(self):
    # Derive the metbk_hourly stream from METBK + VELPT source data.
    # NOTE(review): this re-defines test_metbk_hourly; an identical definition
    # appears earlier in the class, which this one shadows, so the earlier copy
    # never runs. One of the two should be removed or renamed.
    cals = {
        'CC_lat': 40.13678333,
        'CC_lon': -70.76978333,
        'CC_depth_of_conductivity_and_temperature_measurements_m': 1.0668,
        'CC_height_of_air_humidity_measurement_m': 4.2926,
        'CC_height_of_air_temperature_measurement_m': 4.2926,
        'CC_height_of_windspeed_sensor_above_sealevel_m': 4.7498,
        'CC_jcool': 1,
        'CC_jwarm': 1,
        'CC_zinvpbl': 600,
    }
    metbk_fn = 'metbk_a_dcl_instrument_recovered.nc'
    metbk_ds = xr.open_dataset(os.path.join(DATA_DIR, metbk_fn), decode_times=False)
    vel_fn = 'velpt_ab_dcl_instrument_recovered.nc'
    vel_ds = xr.open_dataset(os.path.join(DATA_DIR, vel_fn), decode_times=False)
    hourly_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_hourly')
    source_sk = StreamKey('GI01SUMO', 'SBD11', '06-METBKA000', 'recovered_host', 'metbk_a_dcl_instrument_recovered')
    vel_sk = StreamKey('GI01SUMO', 'RID16', '04-VELPTA000', 'recovered_host', 'velpt_ab_dcl_instrument_recovered')
    tr = TimeRange(metbk_ds.time.values[0], metbk_ds.time.values[-1])
    # One cal record per coefficient, padded by 1000s to span the time range.
    coefficients = {
        k: [{
            'start': tr.start - 1000,
            'stop': tr.stop + 1000,
            'value': cals[k],
            'deployment': 3
        }]
        for k in cals
    }
    sr = StreamRequest(hourly_sk, [], coefficients, tr, {}, request_id='UNIT')
    # Trim each fixture down to the parameters the request actually resolved.
    metbk_ds = metbk_ds[self.base_params + [p.name for p in sr.stream_parameters[source_sk]]]
    vel_ds = vel_ds[self.base_params + [p.name for p in sr.stream_parameters[vel_sk]]]
    sr.datasets[source_sk] = StreamDataset(source_sk, sr.coefficients, sr.uflags, [hourly_sk, vel_sk], sr.request_id)
    sr.datasets[hourly_sk] = StreamDataset(hourly_sk, sr.coefficients, sr.uflags, [source_sk, vel_sk], sr.request_id)
    sr.datasets[vel_sk] = StreamDataset(vel_sk, sr.coefficients, sr.uflags, [hourly_sk, vel_sk], sr.request_id)
    sr.datasets[source_sk]._insert_dataset(metbk_ds)
    sr.datasets[vel_sk]._insert_dataset(vel_ds)
    sr.calculate_derived_products()
    # The hourly dataset should expose every stream parameter plus bookkeeping vars.
    expected_params = [p.name for p in hourly_sk.stream.parameters
                       ] + ['obs', 'time', 'deployment', 'lat', 'lon']
    self.assertListEqual(sorted(expected_params),
                         sorted(sr.datasets[hourly_sk].datasets[3]))
def test_bad_stream_key(self):
    """Building a StreamRequest from a plain string instead of a StreamKey must raise."""
    with self.assertRaises(StreamEngineException):
        StreamRequest('bogus', None, None, None, None)
def test_empty_stream_key(self):
    """Building a StreamRequest with no stream key at all must raise."""
    with self.assertRaises(StreamEngineException):
        StreamRequest(None, None, None, None, None)
def test_basic_stream_request(self): sk = StreamKey('CP05MOAS', 'GL388', '03-CTDGVM000', 'recovered_host', 'ctdgv_m_glider_instrument_recovered') tr = TimeRange(3.622409e+09, 3.627058e+09) sr = StreamRequest(sk, [1527], {}, tr, {}, request_id='UNIT')