def get(self, fn, x):
    """Open dbd file *fn*, read parameter *x* and return the result.

    Exceptions raised by dbdreader propagate unchanged to the caller;
    the file is always closed, even on error.  (The original version
    leaked the open file and re-raised exceptions redundantly.)
    """
    dbd = dbdreader.DBD(fn)
    try:
        v = dbd.get(x)
    finally:
        dbd.close()
    return v
def test_G3S_data_file(self):
    """Read a G3S data file whose byte order must be swapped on this host."""
    print(
        "Reading a G3S data file for which the byte order needs to be swapped."
    )
    data_file = dbdreader.DBD(
        "../data/unit_887-2021-321-3-0.sbd", cacheDir='../data/cac'
    )
    _, depth = data_file.get("m_depth")
    self.assertAlmostEqual(depth.max(), 34.5, delta=0.1)
def test_non_standard_cache_dir_generate_cachefile(self):
    """A missing cache file is (re)generated in the supplied cacheDir."""
    print("non_standard_cache_dir_generated_cachefile")
    # Make sure the cache file is absent so this test really generates it.
    try:
        os.unlink('../data/cac/(813b137d.cac')
    except FileNotFoundError:
        pass
    data_file = dbdreader.DBD("../data/ammonite-2008-028-01-000.mbd",
                              cacheDir='../data/cac')
    result = data_file.get("m_depth")
    # get() returns a (time, value) pair, hence length 2.
    self.assertEqual(len(result), 2)
def get_method(self, method, fn, x, *y):
    """Dispatch to one of dbd's multi-parameter getters and return its result.

    method : 'sync', 'xy' or 'list' — selects dbd.get_sync, dbd.get_xy or
        dbd.get_list respectively.
    fn : dbd file name to open.

    Exceptions from the getters propagate to the caller; an unknown
    *method* leaves ``v`` unbound and raises UnboundLocalError (behaviour
    preserved from the original).  The file is now always closed, matching
    the sibling scalar-argument ``get_method`` implementation.
    """
    dbd = dbdreader.DBD(fn)
    try:
        if method == 'sync':
            v = dbd.get_sync(x, *y)
        elif method == 'xy':
            v = dbd.get_xy(x, *y)
        elif method == 'list':
            v = dbd.get_list(*y)
    finally:
        # Best-effort close; never mask an exception from the getters.
        try:
            dbd.close()
        except Exception:
            pass
    return v
def get(self, fn, x):
    """Open dbd file *fn*, read parameter *x*, close the file, return result.

    The redundant ``except Exception as e: raise e`` of the original is
    removed (it re-raised unchanged) and the bare ``except:`` around the
    close call is narrowed to ``Exception`` so KeyboardInterrupt/SystemExit
    are not swallowed.
    """
    dbd = dbdreader.DBD(fn)
    try:
        v = dbd.get(x)
    finally:
        # Best-effort close; never mask the original exception.
        try:
            dbd.close()
        except Exception:
            pass
    return v
def updateParameterList(self):
    """Populate self.parameterList from the first file that reports parameters.

    Scans self.fileList in order and stops at the first file whose
    parameterNames list is non-empty; if none qualifies, a warning is
    printed and the (empty) list of the last file is used.
    """
    for filename in self.fileList:
        dbd = dbdreader.DBD(filename)
        # First file with a usable parameter list wins.
        if dbd.parameterNames:
            break
    if not dbd.parameterNames:
        # No file at all carried parameter information — warn loudly.
        print('------------------')
        print(
            '!!!! None of the provided files has sufficient parameter information'
        )
        print('------------------')
    self.parameterList = dbd.parameterNames
def get_method(self, method, fn, x, y):
    """Dispatch to one of dbd's multi-parameter getters and return its result.

    method : 'sync', 'xy' or 'list' — selects dbd.get_sync, dbd.get_xy or
        dbd.get_list respectively.
    fn : dbd file name to open.

    The redundant ``except Exception as e: raise e`` is removed and the
    bare ``except:`` guarding the close call is narrowed to ``Exception``.
    An unknown *method* still leaves ``v`` unbound (UnboundLocalError),
    preserving the original behaviour.
    """
    dbd = dbdreader.DBD(fn)
    try:
        if method == 'sync':
            v = dbd.get_sync(x, y)
        elif method == 'xy':
            v = dbd.get_xy(x, y)
        elif method == 'list':
            v = dbd.get_list(y)
    finally:
        # Best-effort close; never mask an exception from the getters.
        try:
            dbd.close()
        except Exception:
            pass
    return v
def loadData(self):
    """Load all variables in self.varList from every file in self.fileList.

    Builds self.Data as a list of 1-D numpy arrays:
      Data[0]      — time stamps (converted to datetime objects at the end),
      Data[i + 1]  — the i-th variable of self.varList, concatenated over
                     all files.
    Variables absent from a given file are padded with NaN for that file's
    time span, except the pseudo-variable 'file_no', which is padded with
    the file's index so samples can be traced back to their source file.
    """
    # initialize data structure: one empty array per variable...
    self.Data = []
    for i in range(len(self.varList) + 1):  # +1 is for time dimension
        self.Data.append(np.array([]))
    # load data file by file
    for f in range(len(self.fileList)):
        tmpdbd = dbdreader.DBD(self.fileList[f])  # just file name
        # check which requested variables are actually present in this file;
        # inorout[i] == 1 marks variable i as present.
        inorout = np.zeros((len(self.varList)))
        loadingList = []
        for i in range(len(self.varList)):
            if self.varList[i] in tmpdbd.parameterNames:
                inorout[i] = 1
                loadingList.append(self.varList[i])
        #tmpdata = tmpdbd.get_sync( *self.varList )
        # Only the variables present in this file are read; tmpdata[0] is the
        # common time base, tmpdata[1:] follow loadingList's order.
        tmpdata = tmpdbd.get_sync(*loadingList)
        cnt = 1  # index 0 is for the time vector
        self.Data[0] = np.concatenate(
            (self.Data[0], tmpdata[0]))  # time vector
        for i in range(len(inorout)):
            if inorout[i] == 1:
                # Variable was in the file: take the next column of tmpdata.
                self.Data[i + 1] = np.concatenate(
                    (self.Data[i + 1], tmpdata[cnt]))
                cnt += 1
            else:
                # if the variable was not in the file fill with nans
                # (or with the file index for the 'file_no' pseudo-variable)
                if self.varList[i] in ['file_no']:
                    self.Data[i + 1] = np.concatenate(
                        (self.Data[i + 1], np.ones(tmpdata[0].shape) * f))
                else:
                    self.Data[i + 1] = np.concatenate(
                        (self.Data[i + 1],
                         np.ones(tmpdata[0].shape) * np.nan))
    # convert time data from POSIX seconds to datetime objects
    # NOTE(review): utcfromtimestamp is deprecated since Python 3.12 —
    # consider dt.datetime.fromtimestamp(t, dt.timezone.utc) when upgrading.
    self.Data[0] = np.asarray(
        [dt.datetime.utcfromtimestamp(t) for t in self.Data[0]])
def test_missing_cache_file(self):
    """Opening a data file whose cache file is absent must raise DbdError."""
    print("Throw an error when cache file is missing...")
    with self.assertRaises(dbdreader.DbdError):
        dbdreader.DBD("../data/unit_887-2021-321-3-0.sbd")
import numpy as np
import gsw
import dbdreader
import ndf

# Read CTD data from a single engineering (ebd) file.
dbd = dbdreader.DBD(
    "/home/lucas/gliderdata/toulon_201504/hd/comet-2015-098-03-000.ebd")
raw = dbd.get_sync("sci_ctd41cp_timestamp",
                   ["sci_water_temp", "sci_water_cond", "sci_water_pressure"])
# Keep only rows with a positive temperature reading (raw[2]); the columns
# are time, CTD time stamp, temperature, conductivity and pressure.
t, tctd, T, C, P = np.compress(raw[2] > 0, raw, axis=1)

# TEOS-10 chain: practical salinity -> absolute salinity -> conservative
# temperature -> in-situ density.  Conductivity and pressure are scaled by
# 10 to match gsw's expected units; 43.1 N / 4.8 E is the Toulon site.
SP = gsw.SP_from_C(C * 10, T, P * 10)
SA = gsw.SA_from_SP(SP, P * 10, 43.1, 4.8)
CT = gsw.CT_from_t(SA, T, P * 10)
rho = gsw.rho(SA, CT, P * 10)

# Sort samples by density and interpolate onto a regular 2 dbar depth grid.
order = np.argsort(rho)
z_s = P[order] * 10
rho_is = rho[order]
zi = np.arange(0, P.max() * 10, 2)
rhoi = np.interp(zi, z_s, rho_is)

# Store the resulting density profile.
data = ndf.NDF()
data.add_parameter("rho", "kg/m^3", (zi, rhoi), "in situ density toulon exp.")
data.save("toulon/density_profile_toulon.ndf")
#!/bin/python3
"""Set each dbd/ebd file's access/modification times to its recorded
file-open time, so directory listings sort in mission order."""
import os
import sys

import dbdreader

if len(sys.argv) == 1:
    raise ValueError("Supply one or more dbd/ebd files.")

for fn in sys.argv[1:]:
    if not os.path.isfile(fn):
        print("Skipping %s..." % (fn))
        continue
    try:
        dbd = dbdreader.DBD(fn)
    # Narrowed from a bare ``except:`` so Ctrl-C / SystemExit still abort
    # the batch; any unreadable or corrupt file is skipped best-effort.
    except Exception:
        print("Skipping %s..." % (fn))
    else:
        fileopen_time = dbd.get_fileopen_time()
        # Apply the glider's file-open timestamp as both atime and mtime.
        os.utime(fn, times=(fileopen_time, fileopen_time))
def test_non_standard_cache_dir(self):
    """Cache files can be read from a user-supplied directory."""
    print("non_standard_cache_dir")
    data_file = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd",
                              cacheDir='../data/cac')
    result = data_file.get("m_depth")
    # get() returns a (time, value) pair, hence length 2.
    self.assertEqual(len(result), 2)
def test_get_mission_name(self):
    """The mission name stored in the file header is reported correctly."""
    print("get_mission_name")
    data_file = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd")
    mission = data_file.get_mission_name()
    self.assertEqual(mission, "micro.mi")
def test_file_open_time(self):
    """The file-open POSIX timestamp in the header is reported correctly."""
    print("file_open_time")
    data_file = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd")
    opened_at = data_file.get_fileopen_time()
    self.assertEqual(opened_at, 1406221414)
def test_get_list(self):
    """get_list() returns one series per requested parameter (smoke test)."""
    print("get_list")
    data_file = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd")
    lat_series, lon_series = data_file.get_list("m_lat", "m_lon")
def test_get_sync_obselete(self):
    """The obsolete list-argument form of get_sync() still works (smoke test)."""
    print("get_sync_obselete")
    data_file = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd")
    synced = data_file.get_sync("m_depth", ['m_lat', 'm_lon'])
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import numpy as np import dbdreader # open a given file dbd = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd") # print what parameters are available: for i, p in enumerate(dbd.parameterNames): print("%2d: %s" % (i, p)) # get the measured depth tm, depth = dbd.get("m_depth") max_depth = depth.max() print("\nmax depth %f m" % (max_depth)) # get lat lon lat, lon = dbd.get_xy("m_lat", "m_lon") # interpolate roll speed on depth time tm, depth, roll, speed = dbd.get_sync("m_depth", ["m_roll", "m_speed"])
import numpy as np
import dbdreader

# Open a given file.
# Note that the default location for cache files ($HOME/.dbdreader) is
# overridden here with a non-standard directory.
dbd = dbdreader.DBD("../data/amadeus-2014-204-05-000.sbd",
                    cacheDir='../data/cac')

# Show which sensor parameters this file contains.
for index, name in enumerate(dbd.parameterNames):
    print("%2d: %s" % (index, name))

# Measured depth comes back as a (time, value) pair of arrays.
tm, depth = dbd.get("m_depth")
max_depth = depth.max()
print("\nmax depth %f m" % (max_depth))

# Matching latitude/longitude samples.
lat, lon = dbd.get_xy("m_lat", "m_lon")

# Interpolate roll and speed onto the depth time base.
tm, depth, roll, speed = dbd.get_sync("m_depth", "m_roll", "m_speed")
print("\nmax speed %f m/s" % (np.nanmax(speed)))

# Close the file again.
dbd.close()
import dbdreader

dbd = dbdreader.DBD(
    "/home/lucas/gliderdata/helgoland201407/hd/sebastian-2014-227-00-160.dbd")
# get() returns a (time, value) pair; unpack it so we report statistics of
# the depth values.  The original printed depth[0].shape / depth[0].mean(),
# i.e. the shape and mean of the *time stamp* array, which is clearly not
# what a depth example intends to show.
tm, depth = dbd.get("m_depth")
print(depth.shape)
print(depth.mean())