def test_spectra(self):
    """
    Test reading a binary file containing the raw spectra together with
    the text file.
    """
    d = Dataset(tempfile.mktemp(), 'w')
    fin_txt = os.path.join(self.data_dir, 'TOFP04', '2017_06_14_0930.txt')
    fin_bin = os.path.join(self.data_dir, 'TOFP04', '2017_06_14_0930.bin')
    fin_high = os.path.join(self.data_dir, 'TOFP04',
                            'Cal_20170602_0956_high.bin')
    fin_low = os.path.join(self.data_dir, 'TOFP04',
                           'Cal_20170602_0956_low.bin')
    fin_dark = os.path.join(self.data_dir, 'TOFP04',
                            'Cal_20170602_0956_dark.bin')
    fin_ref = os.path.join(self.data_dir, 'TOFP04',
                           'Cal_20170602_0956_ref.bin')
    # Map pixel indices to wavelengths using four calibration points
    x = [521, 637, 692, 818]
    y = [305., 315., 319.5, 330.]
    f = interp1d(x, y, fill_value='extrapolate')
    xnew = list(range(0, 2048))
    wavelengths = f(xnew)
    e = d.read(fin_txt, spectra=fin_bin, wavelengths=wavelengths,
               ftype='flyspec', timeshift=12.0)
    self.assertEqual(e['RawDataBuffer'].d_var.shape, (1321, 2048))
    rdtb = e['RawDataTypeBuffer']
    rdt = d.new(rdtb)
    rb = e['RawDataBuffer']
    rb.type = rdt
    r = d.new(rb)
    cb = e['ConcentrationBuffer']
    rdlist = [r]
    # Read the high, low, dark, and reference spectra
    for _k, _f in zip(['high', 'low', 'dark', 'ref'],
                      [fin_high, fin_low, fin_dark, fin_ref]):
        e = d.read(_f, ftype='flyspecref', wavelengths=wavelengths,
                   type=_k)
        rdtb = e['RawDataTypeBuffer']
        rdt = d.new(rdtb)
        rb = e['RawDataBuffer']
        rb.type = rdt
        r = d.new(rb)
        rdlist.append(r)
    cb.rawdata = rdlist
    c = d.new(cb)
    for _r in c.rawdata[:]:
        if _r.type.name[0] == 'measurement':
            break
    if False:
        with tempfile.TemporaryFile() as fd:
            plot(_r, savefig=fd)
            expected_image = os.path.join(self.data_dir,
                                          'raw_data_plot.png')
            rms = self.compare_images(fd, expected_image)
            self.assertTrue(rms <= 0.001)

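# A minimal illustrative sketch (not used by the tests): the pixel-to-wavelength
# calibration that several tests set up inline could be factored out like this.
# The four calibration points are the ones used in the tests; the helper name
# and default pixel count are hypothetical.
def _example_pixel_to_wavelength(npixels=2048):
    from scipy.interpolate import interp1d

    # Pixel indices with known wavelengths (nm); the linear fit is
    # extrapolated to cover the full detector range.
    pixels = [521, 637, 692, 818]
    wavelengths_nm = [305., 315., 319.5, 330.]
    f = interp1d(pixels, wavelengths_nm, fill_value='extrapolate')
    return f(list(range(npixels)))
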
def test_utf8(self):
    """
    Test handling of non-UTF-8 entries.
    """
    d = Dataset(tempfile.mktemp(), 'w')
    with self.assertRaises(MiniDoasException):
        d.read(os.path.join(self.data_dir,
                            'SR_20170328_non_utf8_line.csv'),
               ftype='minidoas-raw')

def test_date_bug(self):
    d = Dataset(tempfile.mktemp(), 'w')
    fin = os.path.join(self.data_dir,
                       'gns_wind_model_data_ukmo_20160127_0630.txt')
    d.read(fin, ftype='NZMETSERVICE')
    gf = d.elements['GasFlow'][0]
    res = get_wind_speed(gf, 174.735, -36.890, 1000,
                         '2016-01-27T06:00:00+13:00')
    (lon, lat, hght, time, vx, vx_error,
     vy, vy_error, vz, vz_error, dist) = res
    self.assertEqual(dist, 0.0)

def test_read(self):
    d = Dataset(tempfile.mktemp(), 'w')
    e = d.read(os.path.join(self.data_dir, '2012_02_29_1340_CHILE.txt'),
               ftype='FLYSPEC')
    r = d.new(e['RawDataBuffer'])
    cb = e['ConcentrationBuffer']
    cb.rawdata = [r]
    c = d.new(cb)
    r = d.elements['RawData'][0]
    self.assertEqual(sum([x.size for x in r.datetime]), 4600)
    self.assertEqual(r.inc_angle[0], 174.750)
    c = d.elements['Concentration'][0]
    r1 = c.rawdata[0]
    self.assertEqual(len(c.value[:]), 4600)
    np.testing.assert_array_almost_equal(
        r1.position[0], [-67.8047, -23.3565, 3927.], 2)

    # Discretize all retrievals onto a grid to show a daily plot
    bins = np.arange(0, 180, 1.0)
    m = []
    for _angle, _so2 in split_by_scan(r1.inc_angle[:], c.value[:]):
        _so2_binned = binned_statistic(_angle, _so2, 'mean', bins)
        m.append(_so2_binned.statistic)
    m = np.array(m)
    ids = np.argmax(np.ma.masked_invalid(m), axis=1)
    maxima = np.array([166., 167., 167., 167., 168., 167., 168., 167.,
                       167., 167., 167., 167., 168., 167., 167., 167.,
                       167., 166., 167., 166., 166., 167., 165., 165.,
                       165., 164., 165., 163., 163., 164., 163., 165.,
                       164., 164., 164., 161.])
    np.testing.assert_array_almost_equal(maxima, bins[ids], 2)

    d1 = Dataset(tempfile.mktemp(), 'w')
    e = d1.read(os.path.join(self.data_dir, '2016_06_11_0830_TOFP04.txt'),
                ftype='FLYSPEC', timeshift=12.0)
    r = d1.new(e['RawDataBuffer'])
    cb = e['ConcentrationBuffer']
    cb.rawdata = [r]
    d1.new(cb)
    c = d1.elements['Concentration'][0]
    r = c.rawdata[0]
    m = []
    for _angle, _so2 in split_by_scan(r.inc_angle[:], c.value[:]):
        _so2_binned = binned_statistic(_angle, _so2, 'mean', bins)
        m.append(_so2_binned.statistic)
    m = np.array(m)
    ids = np.argmax(np.ma.masked_invalid(m), axis=1)
    maxima = np.array([147., 25., 27., 86., 29., 31., 27., 27., 28.,
                       137., 34., 34.])
    np.testing.assert_array_almost_equal(maxima, bins[ids], 2)

def main(rootdir):
    for root, dirs, files in os.walk(rootdir):
        for f in files:
            match = re.match(
                r'gns_wind_model_data_ecmwf_(\d+)_(\d+)\.txt', f)
            if match:
                msg = "reading " + os.path.join(root, f)
                msg += " for day " + match.group(1)
                print(msg)
                d = Dataset(tempfile.mktemp(), 'w')
                try:
                    d.read(os.path.join(root, f), ftype='NZMETSERVICE')
                    write_testfile(d, match.group(1), match.group(2),
                                   os.path.join(root, f))
                except NZMetservicePluginException as e:
                    print(e)
                del d

def test_read(self):
    d = Dataset(tempfile.mktemp(), 'w')
    e = d.read(os.path.join(self.data_dir, 'minidoas', 'NE_20161101.csv'),
               ftype='minidoas-raw')
    rb = e['RawDataBuffer']
    self.assertEqual(rb.d_var.shape, (7615, 482))
    self.assertEqual(rb.d_var[0, 0], 78)
    self.assertEqual(rb.datetime[0],
                     np.datetime64('2016-11-01T09:00:00.070'))
    self.assertEqual(rb.datetime[-1],
                     np.datetime64('2016-11-01T16:29:54.850'))

    with self.assertRaises(MiniDoasException):
        e1 = d.read(os.path.join(self.data_dir, 'minidoas',
                                 'NE_2016_11_01_Spectra.csv'),
                    ftype='minidoas-spectra')
    e1 = d.read(os.path.join(self.data_dir, 'minidoas',
                             'NE_2016_11_01_Spectra.csv'),
                date='2016-11-01', ftype='minidoas-spectra', timeshift=13)
    cb = e1['ConcentrationBuffer']
    self.assertEqual(cb.datetime[-1],
                     np.datetime64('2016-11-01T03:28:07.410'))

    fn_wd = os.path.join(self.data_dir, 'minidoas', 'wind',
                         '20161101_WD_00.txt')
    fn_ws = os.path.join(self.data_dir, 'minidoas', 'wind',
                         '20161101_WS_00.txt')
    e2 = d.read({'direction': fn_wd, 'speed': fn_ws}, timeshift=13,
                ftype='minidoas-wind')
    gfb = e2['GasFlowBuffer']
    self.assertEqual(int(vec2bearing(gfb.vx[0], gfb.vy[0])), 240)
    self.assertAlmostEqual(np.sqrt(gfb.vx[0]**2 + gfb.vy[0]**2), 3.2, 1)
    self.assertEqual(gfb.datetime[0],
                     np.datetime64("2016-10-31T19:10:00.000"))

    e3 = d.read(os.path.join(self.data_dir, 'minidoas',
                             'XX_2016_11_01_Combined.csv'),
                date='2016-11-01', ftype='minidoas-scan', station='NE',
                timeshift=13)
    fb = e3['FluxBuffer']
    np.testing.assert_array_almost_equal(fb.value[:],
                                         np.array([3.8, 1.2]), 1)
    self.assertEqual(fb.datetime[0],
                     np.datetime64('2016-10-31T23:15:04'))

def test_missing_entries(self):
    """
    Test handling of missing entries.
    """
    d = Dataset(tempfile.mktemp(), 'w')
    e = d.read(os.path.join(self.data_dir,
                            'SR_20160530_missing_entries.csv'),
               date='2016-05-30', ftype='minidoas-scan')
    fb = e['FluxBuffer']
    self.assertEqual(fb.value.size, 12)

def test_read_flux(self):
    d = Dataset(tempfile.mktemp(), 'w')
    fin = os.path.join(self.data_dir, 'TOFP04', 'TOFP04_2017_06_14.txt')
    e = d.read(fin, ftype='flyspecflux', timeshift=13.0)
    nlines = None
    with open(fin) as fd:
        nlines = len(fd.readlines())
    self.assertEqual(e['FluxBuffer'].value.shape, (nlines - 1,))
    fb = e['FluxBuffer']
    self.assertEqual(fb.datetime[-1],
                     np.datetime64('2017-06-14T03:29:38.033000'))

def test_read_wind(self):
    d = Dataset(tempfile.mktemp(), 'w')
    fin = os.path.join(self.data_dir, 'TOFP04', 'wind', '2017_06_14.txt')
    gf = d.read(fin, ftype='flyspecwind', timeshift=13)
    vx = gf.vx[0]
    vy = gf.vy[0]
    dt = gf.datetime[0]
    v = np.sqrt(vx * vx + vy * vy)
    self.assertAlmostEqual(v, 10.88, 2)
    self.assertAlmostEqual(vec2bearing(vx, vy), 255, 6)
    self.assertEqual(dt, np.datetime64('2017-06-13T17:00:00'))

def test_read_refspec(self):
    d = Dataset(tempfile.mktemp(), 'w')
    x = [521, 637, 692, 818]
    y = [305., 315., 319.5, 330.]
    f = interp1d(x, y, fill_value='extrapolate')
    xnew = list(range(0, 2048))
    wavelengths = f(xnew)
    with self.assertRaises(FlySpecPluginException):
        e = d.read(os.path.join(self.data_dir, 'TOFP04',
                                'Cal_20170602_0956_dark.bin'),
                   ftype='FLYSPECREF', wavelengths=wavelengths)
    e = d.read(os.path.join(self.data_dir, 'TOFP04',
                            'Cal_20170602_0956_dark.bin'),
               ftype='FLYSPECREF', type='dark', wavelengths=wavelengths)
    self.assertEqual(e['RawDataBuffer'].d_var.shape, (10, 2048))

def test_add(self):
    d1 = Dataset(tempfile.mktemp(), 'w')
    e = d1.read(os.path.join(self.data_dir, '2016_06_11_0830_TOFP04.txt'),
                ftype='FLYSPEC', timeshift=12.0)
    r = d1.new(e['RawDataBuffer'])
    cb = e['ConcentrationBuffer']
    cb.rawdata = [r]
    d1.new(cb)

    d2 = Dataset(tempfile.mktemp(), 'w')
    e = d2.read(os.path.join(self.data_dir, '2016_06_11_0900_TOFP04.txt'),
                ftype='FLYSPEC', timeshift=12.0)
    r = d2.new(e['RawDataBuffer'])
    cb = e['ConcentrationBuffer']
    cb.rawdata = [r]
    d2.new(cb)

    d1 += d2
    self.assertEqual(len(d1.elements['Concentration']), 2)
    self.assertEqual(len(d1.elements['RawData']), 2)

def test_read(self):
    d = Dataset(tempfile.mktemp(), 'w')
    d.read(os.path.join(self.data_dir,
                        'gns_wind_model_data_ecmwf_20160921_0630.txt'),
           ftype='NZMETSERVICE')
    gf = d.elements['GasFlow'][0]
    res = get_wind_speed(gf, 174.735, -36.890, 1000,
                         '2016-09-21T06:00:00+12:00')
    (lon, lat, hght, time, vx, vx_error,
     vy, vy_error, vz, vz_error, dist) = res
    self.assertEqual(lon, 174.735)
    self.assertEqual(lat, -36.890)
    self.assertEqual(hght, 1000)
    # Convert wind speed from m/s to knots (1 knot = 0.514444 m/s)
    v = math.sqrt(vx * vx + vy * vy)
    self.assertAlmostEqual(v / 0.514444, 17, 6)
    self.assertAlmostEqual(70., vec2bearing(vx, vy), 6)
    m = gf.methods[0]
    self.assertEqual(m.name, 'gfs')

    d.read(os.path.join(self.data_dir,
                        'gns_wind_model_data_ecmwf_20160921_0630.txt'),
           ftype='NZMETSERVICE', preferred_model='ecmwf')
    gf1 = d.elements['GasFlow'][1]
    res = get_wind_speed(gf1, 174.755, -36.990, 1000,
                         '2016-09-21T06:00:00+12:00')
    (lon, lat, hght, time, vx, vx_error,
     vy, vy_error, vz, vz_error, dist) = res
    self.assertEqual(lon, 174.735)
    self.assertEqual(lat, -36.890)
    self.assertEqual(hght, 1000)
    v = math.sqrt(vx * vx + vy * vy)
    self.assertAlmostEqual(v / 0.514444, 19, 6)
    self.assertAlmostEqual(65., vec2bearing(vx, vy), 6)
    self.assertEqual(gf1.methods[0].name, 'ecmwf')
    self.assertEqual(gf1.unit, 'm/s')

def test_empty_model(self):
    d = Dataset(tempfile.mktemp(), 'w')
    with self.assertRaises(NZMetservicePluginException):
        d.read(os.path.join(self.data_dir,
                            'gns_wind_model_data_ecmwf_20141007_1830.txt'),
               ftype='NZMETSERVICE')
    d.read(os.path.join(self.data_dir,
                        'gns_wind_model_data_ecmwf_20141007_1830.txt'),
           ftype='NZMETSERVICE', preferred_model='gfs')

    d1 = Dataset(tempfile.mktemp(), 'w')
    with self.assertRaises(NZMetservicePluginException):
        filename = 'gns_wind_model_data_ecmwf_20141228_0630.txt'
        d1.read(os.path.join(self.data_dir, filename),
                ftype='NZMETSERVICE', preferred_model='ecmwf')
    d1.read(os.path.join(self.data_dir,
                         'gns_wind_model_data_ecmwf_20141228_0630.txt'),
            ftype='NZMETSERVICE')

def test_wind(self):
    """
    Test handling of wind data files with different numbers of entries
    for wind speed and wind direction.
    """
    d = Dataset(tempfile.mktemp(), 'w')
    windd_filepath = os.path.join(self.data_dir, '20170515_WD_00.txt')
    winds_filepath = os.path.join(self.data_dir, '20170515_WS_00.txt')
    e = d.read({'direction': windd_filepath, 'speed': winds_filepath},
               ftype='minidoas-wind', timeshift=13)
    gfb = e['GasFlowBuffer']
    dates = gfb.datetime[:].astype('datetime64[s]')
    dates += np.timedelta64(13, 'h')
    self.assertEqual(dates.size, 5)
    self.assertEqual(int(vec2bearing(gfb.vx[0], gfb.vy[0])), 240)
    self.assertAlmostEqual(np.sqrt(gfb.vx[0]**2 + gfb.vy[0]**2), 2.5, 1)
    self.assertEqual(int(vec2bearing(gfb.vx[-1], gfb.vy[-1])), 240)
    self.assertAlmostEqual(np.sqrt(gfb.vx[-1]**2 + gfb.vy[-1]**2), 4.2, 1)

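# Illustrative note (not part of the test above): the 13 h timeshift converts
# the UTC timestamps stored in the GasFlowBuffer back to New Zealand local time
# with plain NumPy datetime arithmetic. The timestamps below are made up for
# the example.
def _example_timeshift():
    import numpy as np

    utc = np.datetime64('2017-05-14T19:10:00')
    local = utc + np.timedelta64(13, 'h')
    assert local == np.datetime64('2017-05-15T08:10:00')
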
def test_plot(self):
    d = Dataset(tempfile.mktemp(), 'w')
    e = d.read(os.path.join(self.data_dir, '2012_02_29_1340_CHILE.txt'),
               ftype='FLYSPEC', timeshift=12.0)
    rdt = d.new(e['RawDataTypeBuffer'])
    rb = e['RawDataBuffer']
    rb.type = rdt
    r = d.new(rb)
    cb = e['ConcentrationBuffer']
    cb.rawdata = [r]
    cb.rawdata_indices = np.arange(cb.value.shape[0])
    c = d.new(cb)
    if False:
        with tempfile.TemporaryFile() as fd:
            plot(c, savefig=fd, timeshift=12.0)
            expected_image = os.path.join(self.data_dir,
                                          'chile_retrievals_overview.png')
            rms = self.compare_images(fd, expected_image)
            self.assertTrue(rms <= 0.001)

def main(datapath, outputpath, start, end, pg=True, deletefiles=False):
    msg = "Data path is: {}\n".format(datapath)
    msg += "Output path is: {}\n".format(outputpath)
    msg += "Start date: {}\n".format(start)
    msg += "End date: {}\n".format(end)
    logging.info(msg)
    dates = pd.date_range(start=start, end=end, freq='D')
    if pg:
        ndays = len(dates)
        bar = Bar('Processing', max=ndays)
    for date in dates:
        if pg:
            bar.next()
        else:
            print(date)
        outputfile = 'MiniDOAS_{:d}{:02d}{:02d}.h5'.format(
            date.year, date.month, date.day)
        h5file = os.path.join(outputpath, outputfile)
        if True:
            d = Dataset(h5file, 'w')
            # ToDo: get correct plume coordinates
            tb = TargetBuffer(name='White Island main plume',
                              target_id='WI001',
                              position=[177.18375770, -37.52170799, 321.0])
            t = d.new(tb)
            wpoptions = "{'Pixel316nm':479, 'TrimLower':30, 'LPFilterCount':3,"
            wpoptions += "'MinWindSpeed':3,'BrightEnough':500, 'BlueStep':5, "
            wpoptions += "'MinR2':0.8, 'MaxFitCoeffError':50.0, "
            wpoptions += "'InPlumeThresh':0.05, 'MinPlumeAngle':0.1, "
            wpoptions += "'MaxPlumeAngle':3.0, 'MinPlumeSect':0.4, "
            wpoptions += "'MaxPlumeSect':2.0, 'MeanPlumeCtrHeight':310, "
            wpoptions += "'SEMeanPlumeCtrHeight':0.442, "
            wpoptions += "'MaxRangeSeperation':5000, 'MaxRangeToPlume':5000, "
            wpoptions += "'MaxPlumeWidth':2600, 'MaxPlumeCentreAltitude':2000, "
            wpoptions += "'MaxAltSeperation':1000, 'MaxTimeDiff':30,"
            wpoptions += "'MinTriLensAngle':0.1745, 'MaxTriLensAngle':2.9671,"
            wpoptions += "'SEWindSpeed':0.20, 'WindMultiplier':1.24, "
            wpoptions += "'SEWindDir':0.174}"
            mb1 = MethodBuffer(name='WidPro v1.2',
                               description='Jscript wrapper for DOASIS',
                               settings=wpoptions)
            m1 = d.new(mb1)
            station_info = {}
            location_name = 'White Island North-East Point'
            station_info['WI301'] = {'files': {},
                                     'stationID': 'WI301',
                                     'stationLoc': location_name,
                                     'target': t,
                                     'bearing': 6.0214,
                                     'lon': 177.192979384,
                                     'lat': -37.5166903535,
                                     'elev': 49.0,
                                     'widpro_method': m1,
                                     'wp_station_id': 'NE'}
            station_info['WI302'] = {'files': {},
                                     'stationID': 'WI302',
                                     'stationLoc': 'White Island South Rim',
                                     'target': t,
                                     'bearing': 3.8223,
                                     'lon': 177.189013316,
                                     'lat': -37.5265334424,
                                     'elev': 96.0,
                                     'widpro_method': m1,
                                     'wp_station_id': 'SR'}
            for station in ['WI301', 'WI302']:
                # Find the raw data
                raw_data_filename = "{:s}_{:d}{:02d}{:02d}.zip"
                station_id = station_info[station]['wp_station_id']
                raw_data_filename = raw_data_filename.format(
                    station_id, date.year, date.month, date.day)
                raw_data_filepath = os.path.join(datapath, 'spectra',
                                                 station_id,
                                                 raw_data_filename)
                if os.path.isfile(raw_data_filepath):
                    try:
                        with ZipFile(raw_data_filepath) as myzip:
                            myzip.extractall('/tmp')
                    except Exception:
                        msg = "ERROR 05: Can't unzip file {}"
                        logging.error(msg.format(raw_data_filepath))
                        raw_data_filepath = None
                    else:
                        raw_data_filename = raw_data_filename.replace(
                            '.zip', '.csv')
                        raw_data_filepath = os.path.join(
                            '/tmp', raw_data_filename)
                else:
                    logging.error(
                        "file {} does not exist".format(raw_data_filepath))
                    continue

                try:
                    if not is_file_OK(raw_data_filepath):
                        raw_data_filepath = None
                except Exception as e:
                    print(raw_data_filepath)
                    raise e
                station_info[station]['files']['raw'] = raw_data_filepath

                # Find the concentration data
                monthdir = '{:d}-{:02d}'.format(date.year, date.month)
                spectra_filename = "{:s}_{:d}_{:02d}_{:02d}_Spectra.csv"
                spectra_filename = spectra_filename.format(
                    station_id, date.year, date.month, date.day)
                spectra_filepath = os.path.join(datapath, 'results',
                                                monthdir, spectra_filename)
                if not is_file_OK(spectra_filepath):
                    spectra_filepath = None
                station_info[station]['files']['spectra'] = spectra_filepath

                # Find the flux data
                flux_ah_filename = spectra_filename.replace(
                    'Spectra.csv', 'Scans.csv')
                flux_ah_filepath = os.path.join(datapath, 'results',
                                                monthdir, flux_ah_filename)
                if not is_file_OK(flux_ah_filepath):
                    flux_ah_filepath = None
                station_info[station]['files']['flux_ah'] = flux_ah_filepath

                flux_ch_filename = "XX_{:d}_{:02d}_{:02d}_Combined.csv"
                flux_ch_filename = flux_ch_filename.format(
                    date.year, date.month, date.day)
                flux_ch_filepath = os.path.join(datapath, 'results',
                                                monthdir, flux_ch_filename)
                if not is_file_OK(flux_ch_filepath):
                    flux_ch_filepath = None
                station_info[station]['files']['flux_ch'] = flux_ch_filepath

                fits_flux_ah, fits_flux_ch = FITS_download(date, station)
                station_info[station]['files']['fits_flux_ah'] = fits_flux_ah
                station_info[station]['files']['fits_flux_ch'] = fits_flux_ch

                try:
                    read_single_station(d, station_info[station], date)
                except MiniDoasException as e:
                    logging.error(str(e))
                fn = raw_data_filename.replace('.zip', '.csv')
                fn = os.path.join('/tmp', fn)
                if fn is not None and os.path.isfile(fn):
                    os.remove(fn)

            # Wind data
            windd_dir = os.path.join(datapath, 'wind', 'direction')
            winds_dir = os.path.join(datapath, 'wind', 'speed')
            sub_dir = '{:02d}-{:02d}'.format(date.year - 2000, date.month)
            winds_filename = '{:d}{:02d}{:02d}_WS_00.txt'.format(
                date.year, date.month, date.day)
            windd_filename = winds_filename.replace('WS', 'WD')
            winds_filepath = os.path.join(winds_dir, sub_dir, winds_filename)
            windd_filepath = os.path.join(windd_dir, sub_dir, windd_filename)

            if is_file_OK(winds_filepath) and is_file_OK(windd_filepath):
                # Read in the raw wind data; this is currently not needed
                # to reproduce flux estimates so it's just stored for
                # reference
                e = d.read({'direction': windd_filepath,
                            'speed': winds_filepath},
                           ftype='minidoas-wind', timeshift=13)
                gfb = e['GasFlowBuffer']
                gf = d.new(gfb)
            d.close()

        try:
            verify_flux(os.path.join(outputpath, outputfile), 1.)
        except MDOASException as e:
            msg = str(e)
            logging.error(msg)

        if deletefiles:
            if h5file is not None and os.path.isfile(h5file):
                os.remove(h5file)
            for station in ['WI301', 'WI302']:
                files = [station_info[station]['files']['raw'],
                         station_info[station]['files']['fits_flux_ah'],
                         station_info[station]['files']['fits_flux_ch']]
                for _f in files:
                    if _f is not None and os.path.isfile(_f):
                        os.remove(_f)
    if pg:
        bar.finish()

def test_readabunch(self):
    """
    Read in a whole day's worth of data including the reference spectra,
    the flux results, and the wind data.
    """
    def keyfunc(fn):
        date = os.path.basename(fn).split('.')[0]
        year, month, day, hourmin = date.split('_')
        return datetime.datetime(int(year), int(month), int(day),
                                 int(hourmin[0:2]), int(hourmin[2:]))

    # Reference spectra
    fin_high = os.path.join(self.data_dir, 'TOFP04',
                            'Cal_20170602_0956_high.bin')
    fin_low = os.path.join(self.data_dir, 'TOFP04',
                           'Cal_20170602_0956_low.bin')
    fin_dark = os.path.join(self.data_dir, 'TOFP04',
                            'Cal_20170602_0956_dark.bin')
    fin_ref = os.path.join(self.data_dir, 'TOFP04',
                           'Cal_20170602_0956_ref.bin')

    bearing = 285.
    x = [521, 637, 692, 818]
    y = [305., 315., 319.5, 330.]
    f = interp1d(x, y, fill_value='extrapolate')
    xnew = list(range(0, 2048))
    wavelengths = f(xnew)

    d = Dataset(tempfile.mktemp(), 'w')
    ib = InstrumentBuffer(location='Te Maari crater',
                          type='FlySpec',
                          name='TOFP04')
    inst = d.new(ib)
    tb = TargetBuffer(name='Upper Te Maari crater',
                      position=[175.671854359, -39.107850505, 1505.])
    t = d.new(tb)

    rdlist = []
    for _k, _f in zip(['high', 'low', 'dark', 'ref'],
                      [fin_high, fin_low, fin_dark, fin_ref]):
        e = d.read(_f, ftype='flyspecref', wavelengths=wavelengths,
                   type=_k)
        rdtb = e['RawDataTypeBuffer']
        rdt = d.new(rdtb)
        rb = e['RawDataBuffer']
        rb.type = rdt
        rb.instrument = inst
        r = d.new(rb)
        rdlist.append(r)

    files = glob.glob(os.path.join(self.data_dir, 'TOFP04', '2017*.txt'))
    files = sorted(files, key=keyfunc)
    r = None
    c = None
    nlines = 0
    last_index = 0
    for _f in files:
        try:
            fin_bin = _f.replace('.txt', '.bin')
            with open(_f) as fd:
                nlines += len(fd.readlines())
            e = d.read(_f, ftype='FLYSPEC', spectra=fin_bin,
                       wavelengths=wavelengths, bearing=bearing,
                       timeshift=12)
            if r is None and c is None:
                rdt = d.new(e['RawDataTypeBuffer'])
                rb = e['RawDataBuffer']
                rb.type = rdt
                rb.instrument = inst
                rb.target = t
                r = d.new(rb)
                cb = e['ConcentrationBuffer']
                rdlist.append(r)
                cb.rawdata = rdlist
                cb.rawdata_indices = np.arange(cb.value.shape[0])
                last_index = cb.value.shape[0] - 1
                c = d.new(cb)
            else:
                r.append(e['RawDataBuffer'])
                cb = e['ConcentrationBuffer']
                cb.rawdata_indices = (last_index + 1 +
                                      np.arange(cb.value.shape[0]))
                last_index = last_index + cb.value.shape[0]
                c.append(cb)
        except Exception as ex:
            print((ex, _f, fin_bin))
            continue

    # Check all data has been read
    self.assertEqual(c.rawdata[4].d_var.shape, (nlines, 2048))
    self.assertEqual(c.rawdata[4].inc_angle.shape, (nlines,))
    self.assertEqual(c.value[0], 119.93)
    self.assertEqual(c.value[-1], 23.30)
    self.assertEqual(c.rawdata[4].datetime[-1],
                     np.datetime64('2017-06-14T04:30:00.535'))
    self.assertEqual(c.rawdata[4].datetime[0],
                     np.datetime64('2017-06-13T20:30:49.512'))

    if False:
        with tempfile.TemporaryFile() as fd:
            plot(c, savefig=fd)
            expected_image = os.path.join(self.data_dir, 'TOFP04',
                                          'concentration_plot.png')
            rms = self.compare_images(fd, expected_image)
            self.assertTrue(rms <= 0.001)
        with tempfile.TemporaryFile() as fd:
            plot(c.rawdata[0], savefig=fd)
            expected_image = os.path.join(self.data_dir, 'TOFP04',
                                          'ref_spectrum.png')
            rms = self.compare_images(fd, expected_image)
            self.assertTrue(rms <= 0.001)

    fe = d.read(os.path.join(self.data_dir, 'TOFP04',
                             'TOFP04_2017_06_14.txt'),
                ftype='flyspecflux', timeshift=12)
    gf = d.read(os.path.join(self.data_dir, 'TOFP04', 'wind',
                             '2017_06_14.txt'),
                ftype='flyspecwind', timeshift=12)
    fb = fe['FluxBuffer']
    draw = r.datetime[:].astype('datetime64[us]')
    inds = []
    for i in range(fb.value.shape[0]):
        d0 = fb.datetime[i].astype('datetime64[us]')
        idx0 = np.argmin(abs(draw - d0))
        if i < fb.value.shape[0] - 1:
            d1 = fb.datetime[i + 1].astype('datetime64[us]')
            idx1 = np.argmin(abs(draw - d1))
            # There is a small bug in Nial's program that gets the start
            # of the final scan in a file wrong
            if r.inc_angle[idx1 - 1] < r.inc_angle[idx1]:
                idx1 -= 1
                fb.datetime[i + 1] = r.datetime[idx1]
        else:
            idx1 = r.datetime.shape[0]
        inds.append([idx0, idx1 - 1])

    fb.concentration_indices = inds
    fb.concentration = c
    mb = fe['MethodBuffer']
    m = d.new(mb)
    fb.method = m
    fb.gasflow = gf
    f = d.new(fb)

    nos = 18
    i0, i1 = f.concentration_indices[nos]
    cn = f.concentration
    rn = cn.rawdata[4]
    self.assertAlmostEqual(f.value[nos], 0.62, 2)
    self.assertEqual(rn.inc_angle[i0], 25.)
    self.assertEqual(rn.inc_angle[i1], 150.)
    self.assertEqual(f.datetime[nos],
                     np.datetime64('2017-06-13T21:20:17.196000'))

    pfb = PreferredFluxBuffer(fluxes=[f],
                              flux_indices=[[nos]],
                              value=[f.value[nos]],
                              datetime=[f.datetime[nos]])
    d.new(pfb)

def test_readall(self):
    """
    Produce a complete HDF5 file for one day of MiniDOAS analysis at
    both White Island stations.
    """
    d = Dataset(tempfile.mktemp(), 'w')

    # ToDo: get correct plume coordinates
    tb = TargetBuffer(name='White Island main plume',
                      target_id='WI001',
                      position=[177.18375770, -37.52170799, 321.0])
    t = d.new(tb)

    wpoptions = "{'Pixel316nm':479, 'TrimLower':30, 'LPFilterCount':3,"
    wpoptions += "'MinWindSpeed':3, 'BrightEnough':500, 'BlueStep':5,"
    wpoptions += "'MinR2':0.8, 'MaxFitCoeffError':50.0,"
    wpoptions += "'InPlumeThresh':0.05, 'MinPlumeAngle':0.1,"
    wpoptions += "'MaxPlumeAngle':3.0, 'MinPlumeSect':0.4,"
    wpoptions += "'MaxPlumeSect':2.0, 'MeanPlumeCtrHeight':310,"
    wpoptions += "'SEMeanPlumeCtrHeight':0.442, 'MaxRangeToPlume':5000,"
    wpoptions += "'MaxPlumeWidth':2600, 'MaxPlumeCentreAltitude':2000,"
    wpoptions += "'MaxRangeSeperation':5000, 'MaxAltSeperation':1000,"
    wpoptions += "'MaxTimeDiff':30, 'MinTriLensAngle':0.1745,"
    wpoptions += "'MaxTriLensAngle':2.9671, 'SEWindSpeed':0.20,"
    wpoptions += "'WindMultiplier':1.24, 'SEWindDir':0.174}"
    mb1 = MethodBuffer(name='WidPro v1.2',
                       description='Jscript wrapper for DOASIS',
                       settings=wpoptions)
    m1 = d.new(mb1)

    # Read in the raw wind data; this is currently not needed to reproduce
    # flux estimates so it's just stored for reference
    fn_wd = os.path.join(self.data_dir, 'minidoas', 'wind',
                         '20161101_WD_00.txt')
    fn_ws = os.path.join(self.data_dir, 'minidoas', 'wind',
                         '20161101_WS_00.txt')
    e2 = d.read({'direction': fn_wd, 'speed': fn_ws},
                ftype='minidoas-wind', timeshift=13)
    gfb = e2['GasFlowBuffer']
    d.new(gfb)

    station_info = {}
    files = {'raw': os.path.join(self.data_dir, 'minidoas',
                                 'NE_20161101.csv'),
             'spectra': os.path.join(self.data_dir, 'minidoas',
                                     'NE_2016_11_01_Spectra.csv'),
             'flux_ah': os.path.join(self.data_dir, 'minidoas',
                                     'NE_2016_11_01_Scans.csv'),
             'flux_ch': os.path.join(self.data_dir, 'minidoas',
                                     'XX_2016_11_01_Combined.csv'),
             'fits_flux_ah': os.path.join(self.data_dir, 'minidoas',
                                          'FITS_NE_20161101_ah.csv'),
             'fits_flux_ch': os.path.join(self.data_dir, 'minidoas',
                                          'FITS_NE_20161101_ch.csv')}
    station_info['WI301'] = {'files': files,
                             'stationID': 'WI301',
                             'stationLoc': 'White Island North-East Point',
                             'target': t,
                             'bearing': 6.0214,
                             'lon': 177.192979384,
                             'lat': -37.5166903535,
                             'elev': 49.0,
                             'widpro_method': m1,
                             'wp_station_id': 'NE'}

    files = {'raw': os.path.join(self.data_dir, 'minidoas',
                                 'SR_20161101.csv'),
             'spectra': os.path.join(self.data_dir, 'minidoas',
                                     'SR_2016_11_01_Spectra.csv'),
             'flux_ah': os.path.join(self.data_dir, 'minidoas',
                                     'SR_2016_11_01_Scans.csv'),
             'flux_ch': os.path.join(self.data_dir, 'minidoas',
                                     'XX_2016_11_01_Combined.csv'),
             'fits_flux_ah': os.path.join(self.data_dir, 'minidoas',
                                          'FITS_SR_20161101_ah.csv'),
             'fits_flux_ch': os.path.join(self.data_dir, 'minidoas',
                                          'FITS_SR_20161101_ch.csv')}
    station_info['WI302'] = {'files': files,
                             'stationID': 'WI302',
                             'stationLoc': 'White Island South Rim',
                             'target': t,
                             'bearing': 3.8223,
                             'lon': 177.189013316,
                             'lat': -37.5265334424,
                             'elev': 96.0,
                             'widpro_method': m1,
                             'wp_station_id': 'SR'}

    self.read_single_station(d, station_info['WI301'])
    self.read_single_station(d, station_info['WI302'])
    d.close()