Example #1
def texture2(humfile, sonpath, win, doplot, numclasses):
    '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015).
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, win, doplot, numclasses)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      win : int, *optional* [Default=10]
       size, in pixels, of the moving window
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of 'k-means' classes that the texture lengthscale map will be segmented into

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
      '''
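    # A minimal usage sketch following the Syntax above (hypothetical file
    # locations; the remaining arguments are the documented defaults):
    #
    #   PyHum.texture('/path/to/R00001.DAT', '/path/to/sonfiles', 10, 1, 4)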

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if win:
        win = np.asarray(win, int)
        print('Window is %s square pixels' % (str(win)))

    if numclasses:
        numclasses = np.asarray(numclasses, int)
        print('Number of sediment classes: %s' % (str(numclasses)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    print('[Default] Number of processors is %s' % (str(cpu_count())))

    ########################################################
    ########################################################

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    ft = 1 / loadmat(sonpath + base + 'meta.mat')['pix_m']
    #pix_m = np.squeeze(meta['pix_m'])
    #dep_m = np.squeeze(meta['dep_m'])
    dist_m = np.squeeze(meta['dist_m'])

    ### port
    print("processing port side ...")
    # load memory mapped scan ... port
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

        port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat',
                                    'float32', tuple(shape_port))

    ### star
    print("processing starboard side ...")
    # load memory mapped scan ... starboard
    shape_star = np.squeeze(loadmat(sonpath + base + 'meta.mat')['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

        star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat',
                                    'float32', tuple(shape_star))

    # shape of the merged output: port and starboard scans are stacked, so the
    # stacked dimension is the sum of the port and starboard extents
    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
    else:
        shape = []
        shape.append(1)
        shape.append(shape_port[0])
        shape.append(shape_port[1])
        shape[1] = shape_port[0] + shape_star[0]

    # create memory mapped file for Sp
    #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
    #   fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))
    fp = np.zeros(tuple(shape), dtype='float32')

    if len(shape_star) > 2:

        for p in range(len(port_fp)):

            merge = np.vstack((np.flipud(port_fp[p]), star_fp[p]))
            merge = denoise_tv_chambolle(merge.copy(),
                                         weight=2,
                                         multichannel=False).astype('float32')
            Snn = std_convoluted(merge, win)[1]
            del merge

            try:
                Snn = medfilt2d(Snn, (win + 1, win + 1))
            except:
                Snn = medfilt2d(Snn, (win, win))

            Snn[np.isnan(np.vstack(
                (np.flipud(port_fp[p]), star_fp[p])))] = np.nan
            Snn[np.isnan(np.vstack(
                (np.flipud(port_fp2[p]), star_fp2[p])))] = np.nan

            R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat',
                                    'float32', tuple(shape_star))

            R = np.vstack((np.flipud(R_fp[0]), R_fp[0]))

            R = R / np.max(R)

            rn = replace_nans.RN(R.astype('float64'), 1000, 0.01, 2,
                                 'localmean')
            R = rn.getdata()
            del rn

            # texture lengthscale proxy: windowed variance (Snn**2) weighted by the
            # cosine of the normalized range and divided by the window size
            Sp = (Snn**2) * np.cos(np.deg2rad(R)) / win  ##**2

            fp[p] = Sp.astype('float32')
            del Sp

        #del fp # flush data to file
        shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32',
                                 np.squeeze(fp))
        del fp
        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    else:

        merge = np.vstack((np.flipud(port_fp), star_fp))
        merge = denoise_tv_chambolle(merge.copy(),
                                     weight=2,
                                     multichannel=False).astype('float32')
        Snn = std_convoluted(merge, win)[1]
        del merge

        try:
            Snn = medfilt2d(Snn, (win + 1, win + 1))
        except:
            Snn = medfilt2d(Snn, (win, win))

        Snn[np.isnan(np.vstack((np.flipud(port_fp), star_fp)))] = np.nan
        Snn[np.isnan(np.vstack((np.flipud(port_fp2), star_fp2)))] = np.nan

        R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                                tuple(shape_star))

        R = np.vstack((np.flipud(R_fp), R_fp))
        R = R / np.max(R)

        rn = replace_nans.RN(R.astype('float64'), 1000, 0.01, 2, 'localmean')
        R = rn.getdata()
        del rn

        Sp = (Snn**2) * np.cos(np.deg2rad(R)) / win  ##**2

        shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32',
                                 np.squeeze(Sp))

        #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
        #   np.save(ff, np.squeeze(Sp).astype('float32'))

        #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
        #   class_fp = np.load(ff)

        #del Sp
        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    dist_m = np.squeeze(loadmat(sonpath + base + 'meta.mat')['dist_m'])

    ########################################################
    if doplot == 1:
        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_class(dist_m, shape_port, port_fp[p], star_fp[p],
                           class_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                       humfile, sonpath, base, 0)

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_contours(dist_m, shape_port, port_fp[p], star_fp[p],
                              class_fp[p], ft, humfile, sonpath, base,
                              numclasses, p)
        else:
            plot_contours(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                          humfile, sonpath, base, numclasses, 0)

    #######################################################
    # k-means

    if len(shape_star) > 2:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

        for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

        del fp

        kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat',
                                     'float32', tuple(shape))

    else:
        wc = get_kclass(class_fp.copy(), numclasses)

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(wc).astype('float32'))

        del wc

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'r') as ff:
            kclass_fp = np.load(ff)

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p],
                            kclass_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft,
                        humfile, sonpath, base, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Example #2
def sliding_window(a,ws,ss = None,flatten = True):
   '''
   Return a sliding window over array a in any number of dimensions.
   ws is the window shape and ss the step size per dimension; if ss is
   not given, the windows do not overlap (ss = ws). With flatten=True the
   result is a flat list of windows, returned together with the shape of
   the strided (un-flattened) array.
   '''
   if None is ss:
      # ss was not provided. the windows will not overlap in any direction.
      ss = ws
   ws = norm_shape(ws)
   ss = norm_shape(ss)
   # convert ws, ss, and a.shape to numpy arrays
   ws = np.array(ws)
   ss = np.array(ss)

   #import PyHum.io as io

   shape_tmp = io.set_mmap_data('', '', 'tmp.dat', 'float32', a)
   del a
   a = io.get_mmap_data('', '', 'tmp.dat', 'float32', shape_tmp)

   shap = np.array(a.shape)

   try:
      os.remove('tmp.dat')
   except:
      pass

   # ensure that ws, ss, and a.shape all have the same number of dimensions
   ls = [len(shap),len(ws),len(ss)]
   if 1 != len(set(ls)):
      raise ValueError(\
      'a.shape, ws and ss must all have the same length. They were %s' % str(ls))

   # ensure that ws is smaller than a in every dimension
   if np.any(ws > shap):
      raise ValueError(\
      'ws cannot be larger than a in any dimension.\
 a.shape was %s and ws was %s' % (str(a.shape),str(ws)))

   # how many slices will there be in each dimension?
   newshape = norm_shape(((shap - ws) // ss) + 1)
   # the shape of the strided array will be the number of slices in each dimension
   # plus the shape of the window (tuple addition)
   newshape += norm_shape(ws)
   # the strides tuple will be the array's strides multiplied by step size, plus

   try:
      # the array's strides (tuple addition)
      newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
      a = ast(a,shape = newshape,strides = newstrides)
      if not flatten:
         return a
      # Collapse strided so that it has one more dimension than the window.  I.e.,
      # the new array is a flat list of slices.
      meat = len(ws) if ws.shape else 0
      firstdim = (int(np.product(newshape[:-meat])),) if ws.shape else ()
      dim = firstdim + (newshape[-meat:])
      # remove any dimensions with size 1
      dim = filter(lambda i : i != 1,dim)

      return a.reshape(dim), newshape

   except:

      from itertools import product
      print "memory error, windowing using slower method"
      # For each dimension, create a list of all valid slices
      slices = [[] for i in range(len(ws))]
      for i in xrange(len(ws)):
         nslices = ((shap[i] - ws[i]) // ss[i]) + 1
         for j in xrange(0,nslices):
            start = j * ss[i]
            stop = start + ws[i]
            slices[i].append(slice(start,stop))
      # Get an iterator over all valid n-dimensional slices of the input
      allslices = product(*slices)

      # Allocate memory to hold all valid n-dimensional slices
      nslices = np.product([len(s) for s in slices])
      #out = np.ndarray((nslices,) + tuple(ws),dtype = a.dtype)
      out=[]
      for i,s in enumerate(allslices):
         #out[i] = a[s]
         out.append(a[s])

      del a
      import dask.bag as db
      tmp = db.from_sequence(out, npartitions=1000)
      del out

      return tmp.compute(), newshape
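
The fast path above is numpy's as_strided trick; a small self-contained sketch of the same shape/stride computation on a toy array (window and step chosen here only for illustration):

import numpy as np
from numpy.lib.stride_tricks import as_strided

a = np.arange(16).reshape(4, 4)
ws, ss = (2, 2), (2, 2)                                    # window shape, step size
newshape = tuple((np.array(a.shape) - ws) // ss + 1) + ws  # -> (2, 2, 2, 2)
newstrides = tuple(np.array(a.strides) * ss) + a.strides
windows = as_strided(a, shape=newshape, strides=newstrides)
# flatten to a list of 2x2 windows, as sliding_window does when flatten=True
windows = windows.reshape((-1,) + ws)                      # shape (4, 2, 2)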
Example #3
def read(humfile, sonpath, cs2cs_args="epsg:26949", c=1450.0, draft=0.3, doplot=1, t=0.108, bedpick=1, flip_lr=0, model=998, calc_bearing = 0, filt_bearing = 0, chunk='d100'): #cog = 1,

    '''
    Read a .DAT and associated set of .SON files recorded by a Humminbird(R)
    instrument.

    Parse the data into a set of memory mapped files that will
    subsequently be used by the other functions of the PyHum module.

    Export time-series data and metadata in other formats.

    Create a kml file for visualising boat track

    Syntax
    ----------
    [] = PyHum.read(humfile, sonpath, cs2cs_args, c, draft, doplot, t, bedpick, flip_lr, model, calc_bearing, filt_bearing, chunk)

    Parameters
    ------------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    c : float, *optional* [Default=1450.0]
       speed of sound in water (m/s). Defaults to a value of freshwater
    draft : float, *optional* [Default=0.3]
       draft from water surface to transducer face (m)
    doplot : float, *optional* [Default=1]
       if 1, plots will be made
    t : float, *optional* [Default=0.108]
       length of transducer array (m).
       Default value is that of the 998 series Humminbird(R)
    bedpick : int, *optional* [Default=1]
       if 1, bedpicking will be carried out automatically
       if 0, user will be prompted to pick the bed location on screen
    flip_lr : int, *optional* [Default=0]
       if 1, port and starboard scans will be flipped
       (for situations where the transducer is flipped 180 degrees)
    model: int, *optional* [Default=998]
       A 3 or 4 number code indicating the model number
       Examples: 998, 997, 1198, 1199
    calc_bearing : float, *optional* [Default=0]
       if 1, bearing will be calculated from coordinates
    filt_bearing : float, *optional* [Default=0]
       if 1, bearing will be filtered
    chunk : str, *optional* [Default='d100' (distance, 100 m)]
       a letter followed by a number. The letter options are:
       'd' - parse chunks by distance; the number is the distance in m
       'p' - parse chunks by number of pings; the number is the number of pings
       'h' - parse chunks by change in heading; the number is the change in heading in degrees
       '1' - process just 1 chunk

    Returns
    ---------
    sonpath+base+'_data_port.dat': memory-mapped file
        contains the raw echogram from the port side
        sidescan sonar (where present)

    sonpath+base+'_data_star.dat': memory-mapped file
        contains the raw echogram from the starboard side
        sidescan sonar (where present)

    sonpath+base+'_data_dwnhi.dat': memory-mapped file
        contains the raw echogram from the high-frequency
        echosounder (where present)

    sonpath+base+'_data_dwnlow.dat': memory-mapped file
        contains the raw echogram from the low-frequency
        echosounder (where present)

    sonpath+base+"trackline.kml": google-earth kml file
        contains the trackline of the vessel during data
        acquisition

    sonpath+base+'rawdat.csv': comma separated value file
        contains time-series data. columns corresponding to
        longitude
        latitude
        easting (m)
        northing (m)
        depth to bed (m)
        alongtrack cumulative distance (m)
        vessel heading (deg.)

    sonpath+base+'meta.mat': .mat file
        matlab format file containing a dictionary object
        holding metadata information. Fields are:
        e : ndarray, easting (m)
        n : ndarray, northing (m)
        es : ndarray, low-pass filtered easting (m)
        ns : ndarray, low-pass filtered northing (m)
        lat : ndarray, latitude
        lon : ndarray, longitude
        shape_port : tuple, shape of port scans in memory mapped file
        shape_star : tuple, shape of starboard scans in memory mapped file
        shape_hi : tuple, shape of high-freq. scans in memory mapped file
        shape_low : tuple, shape of low-freq. scans in memory mapped file
        dep_m : ndarray, depth to bed (m)
        dist_m : ndarray, distance along track (m)
        heading : ndarray, heading of vessel (deg. N)
        pix_m: float, size of 1 pixel in across-track dimension (m)
        bed : ndarray, depth to bed (m)
        c : float, speed of sound in water (m/s)
        t : float, length of sidescan transducer array (m)
        spd : ndarray, vessel speed (m/s)
        time_s : ndarray, time elapsed (s)
        caltime : ndarray, unix epoch time (s)
    '''
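    # A minimal usage sketch following the Syntax above (hypothetical paths;
    # the remaining arguments are the documented defaults):
    #
    #   PyHum.read('/path/to/R00001.DAT', '/path/to/sonfiles',
    #              cs2cs_args="epsg:26949", c=1450.0, draft=0.3)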

    # prompt user to supply file if no input file given
    if not humfile:
      print 'An input file is required!!!!!!'
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      humfile = askopenfilename(filetypes=[("DAT files","*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
      print 'A *.SON directory is required!!!!!!'
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
      print 'Input file is %s' % (humfile)

    if sonpath:
      print 'Son files are in %s' % (sonpath)

    if cs2cs_args:
      print 'cs2cs arguments are %s' % (cs2cs_args)

    if draft:
      draft = float(draft)
      print 'Draft: %s' % (str(draft))

    if c:
      c = float(c)
      print 'Celerity of sound: %s m/s' % (str(c))

    if doplot:
      doplot = int(doplot)
      if doplot==0:
         print "Plots will not be made"

    if flip_lr:
      flip_lr = int(flip_lr)
      if flip_lr==1:
         print "Port and starboard will be flipped"

    if t:
      t = np.asarray(t,float)
      print 'Transducer length is %s m' % (str(t))

    if bedpick:
      bedpick = np.asarray(bedpick,int)
      if bedpick==1:
         print 'Bed picking is auto'
      elif bedpick==0:
         print 'Bed picking is manual'
      else:
         print 'User will be prompted per chunk about bed picking method'

    if chunk:
       chunk = str(chunk)
       if chunk[0]=='d':
          chunkmode=1
          chunkval = int(chunk[1:])
          print 'Chunks based on distance of %s m' % (str(chunkval))
       elif chunk[0]=='p':
          chunkmode=2
          chunkval = int(chunk[1:])
          print 'Chunks based on %s pings' % (str(chunkval))
       elif chunk[0]=='h':
          chunkmode=3
          chunkval = int(chunk[1:])
          print 'Chunks based on heading deviation of %s degrees' % (str(chunkval))
       elif chunk[0]=='1':
          chunkmode=4
          chunkval = 1
          print 'Only 1 chunk will be produced'
       else:
          print "Chunk mode not understood - should be 'd', 'p', or 'h' - using defaults"
          chunkmode=1
          chunkval = 100
          print 'Chunks based on distance of %s m' % (str(chunkval))

    if model:
       model = int(model)
       print "Data is from the %s series"  % (str(model))

#    if cog:
#       cog = int(cog)
#       if cog==1:
#          print "Heading based on course-over-ground"

    if calc_bearing:
       calc_bearing = int(calc_bearing)
       if calc_bearing==1:
          print "Bearing will be calculated from coordinates"

    if filt_bearing:
       filt_bearing = int(filt_bearing)
       if filt_bearing==1:
          print "Bearing will be filtered"

    ## for debugging
    #humfile = r"test.DAT"; sonpath = "test_data"
    #cs2cs_args = "epsg:26949"; doplot = 1; draft = 0
    #c=1450; bedpick=1; fliplr=1; chunk = 'd100'
    #model=998; cog=1; calc_bearing=0; filt_bearing=0

    f = 455

    try:
       print "Checking the epsg code you have chosen for compatibility with Basemap ... "
       from mpl_toolkits.basemap import Basemap
       m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],
          resolution = 'i', llcrnrlon=10, llcrnrlat=10, urcrnrlon=30, urcrnrlat=30)
       del m
       print "... epsg code compatible"
    except:
       print "Error: the epsg code you have chosen is not compatible with Basemap"
       print "please choose a different epsg code (http://spatialreference.org/)"
       print "program will now close"
       sys.exit()

    # start timer
    if os.name=='posix': # true if linux/mac or cygwin on windows
       start = time.time()
    else: # windows
       start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1]!=os.sep:
       sonpath = sonpath + os.sep

    # get the SON files from this directory
    sonfiles = glob.glob(sonpath+'*.SON')
    if not sonfiles:
        sonfiles = glob.glob(os.getcwd()+os.sep+sonpath+'*.SON')

    base = humfile.split('.DAT') # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    print "WARNING: Because files have to be read in byte by byte,"
    print "this could take a very long time ..."

    #reading each sonfile in parallel should be faster ...
    try:
       o = Parallel(n_jobs = np.min([len(sonfiles), cpu_count()]), verbose=0)(delayed(getscans)(sonfiles[k], humfile, c, model, cs2cs_args) for k in xrange(len(sonfiles)))
       X, Y, A, B = zip(*o)

       for k in xrange(len(Y)):
          if Y[k] == 'sidescan_port':
             dat = A[k] #data.gethumdat()
             metadat = B[k] #data.getmetadata()
             if flip_lr==0:
                data_port = X[k].astype('int16')
             else:
                data_star = X[k].astype('int16')

          elif Y[k] == 'sidescan_starboard':
             if flip_lr==0:
                data_star = X[k].astype('int16')
             else:
                data_port = X[k].astype('int16')

          elif Y[k] == 'down_lowfreq':
             data_dwnlow = X[k].astype('int16')

          elif Y[k] == 'down_highfreq':
             data_dwnhi = X[k].astype('int16')

       del X, Y, A, B, o
       old_pyread = 0

       if 'data_port' not in locals():
          data_port = ''
          print "portside scan not available"

       if 'data_star' not in locals():
          data_star = ''
          print "starboardside scan not available"

       if 'data_dwnhi' not in locals():
          data_dwnhi = ''
          print "high-freq. downward scan not available"

       if 'data_dwnlow' not in locals():
          data_dwnlow = ''
          print "low-frq. downward scan not available"

    except: # revert to the older version if the parallelised version fails

       print "something went wrong with the parallelised version of pyread ..."

       import pyread
       data = pyread.pyread(sonfiles, humfile, c, model, cs2cs_args)

       dat = data.gethumdat()

       metadat = data.getmetadata()

       old_pyread = 1

    nrec = len(metadat['n'])

    metadat['instr_heading'] = metadat['heading'][:nrec]

    #metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing, cog, metadat['lat'], metadat['lon'], metadat['instr_heading'])

    try:
       es = humutils.runningMeanFast(metadat['e'][:nrec],len(metadat['e'][:nrec])/100)
       ns = humutils.runningMeanFast(metadat['n'][:nrec],len(metadat['n'][:nrec])/100)
    except:
       es = metadat['e'][:nrec]
       ns = metadat['n'][:nrec]

    metadat['es'] = es
    metadat['ns'] = ns

    try:
       trans =  pyproj.Proj(init=cs2cs_args)
    except:
       trans =  pyproj.Proj(cs2cs_args.lstrip(), inverse=True)

    lon, lat = trans(es, ns, inverse=True)
    metadat['lon'] = lon
    metadat['lat'] = lat

    metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing, metadat['lat'], metadat['lon'], metadat['instr_heading']) #cog

    dist_m = humutils.get_dist(lat, lon)
    metadat['dist_m'] = dist_m

    if calc_bearing==1: # recalculate speed, m/s
       ds=np.gradient(np.squeeze(metadat['time_s']))
       dx=np.gradient(np.squeeze(metadat['dist_m']))
       metadat['spd'] = dx[:nrec]/ds[:nrec]

    # theta at 3dB in the horizontal
    theta3dB = np.arcsin(c/(t*(f*1000)))
    #resolution of 1 sidescan pixel to nadir
    ft = (np.pi/2)*(1/theta3dB) #/ (f/455)
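    # e.g. with the defaults c = 1450 m/s, t = 0.108 m and f = 455 kHz:
    # theta3dB = arcsin(1450/49140) ~= 0.0295 rad, so ft ~= (pi/2)/0.0295 ~= 53
    # pixels per metre in the across-track direction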

    dep_m = humutils.get_depth(metadat['dep_m'][:nrec])

    if old_pyread == 1: #older pyread version

       # port scan
       try:
          if flip_lr==0:
             data_port = data.getportscans().astype('int16')
          else:
             data_port = data.getstarscans().astype('int16')
       except:
          data_port = ''
          print "portside scan not available"

    if data_port!='':

       Zt, ind_port = makechunks_scan(chunkmode, chunkval, metadat, data_port, 0)
       del data_port

       ## create memory mapped file for Z
       shape_port = io.set_mmap_data(sonpath, base, '_data_port.dat', 'int16', Zt)

       ##we are only going to access the portion of memory required
       port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat', 'int16', shape_port)

    if old_pyread == 1: #older pyread version
       # starboard scan
       try:
          if flip_lr==0:
             data_star = data.getstarscans().astype('int16')
          else:
             data_star = data.getportscans().astype('int16')
       except:
          data_star = ''
          print "starboardside scan not available"

    if data_star!='':

       Zt, ind_star = makechunks_scan(chunkmode, chunkval, metadat, data_star, 1)
       del data_star

       # create memory mapped file for Z
       shape_star = io.set_mmap_data(sonpath, base, '_data_star.dat', 'int16', Zt)

       star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat', 'int16', shape_star)

    if 'star_fp' in locals() and 'port_fp' in locals():
       # check that port and starboard are same size
       # and trim if not
       if np.shape(star_fp)!=np.shape(port_fp):
          print "port and starboard scans are different sizes ... rectifying"
          if np.shape(port_fp[0])[1] > np.shape(star_fp[0])[1]:
             tmp = port_fp.copy()
             tmp2 = np.empty_like(star_fp)
             for k in xrange(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(star_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_port = io.set_mmap_data(sonpath, base, '_data_port2.dat', 'int16', tmp2)
             #shape_star = shape_port.copy()
             shape_star = tuple(np.asarray(shape_port).copy())

             ##we are only going to access the portion of memory required
             port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat', 'int16', shape_port)

             ind_port = list(ind_port)
             ind_port[-1] = np.shape(star_fp[0])[1]
             ind_port = tuple(ind_port)

          elif np.shape(port_fp[0])[1] < np.shape(star_fp[0])[1]:
             tmp = star_fp.copy()
             tmp2 = np.empty_like(port_fp)
             for k in xrange(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(port_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_port = io.set_mmap_data(sonpath, base, '_data_star2.dat', 'int16', tmp2)
             #shape_star = shape_port.copy()
             shape_star = tuple(np.asarray(shape_port).copy())

             #we are only going to access the portion of memory required
             star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat', 'int16', shape_star)

             ind_star = list(ind_star)
             ind_star[-1] = np.shape(port_fp[0])[1]
             ind_star = tuple(ind_star)

    if old_pyread == 1: #older pyread version
       # low-freq. sonar
       try:
          data_dwnlow = data.getlowscans().astype('int16')
       except:
          data_dwnlow = ''
          print "low-freq. scan not available"

    if data_dwnlow!='':

       Zt, ind_low = makechunks_scan(chunkmode, chunkval, metadat, data_dwnlow, 2)
       del data_dwnlow

       # create memory mapped file for Z
       shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', Zt)

       ##we are only going to access the portion of memory required
       dwnlow_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', shape_low)

    if old_pyread == 1: #older pyread version
       # hi-freq. sonar
       try:
          data_dwnhi = data.gethiscans().astype('int16')
       except:
          data_dwnhi = ''
          print "high-freq. scan not available"

    if data_dwnhi!='':

       Zt, ind_hi = makechunks_scan(chunkmode, chunkval, metadat, data_dwnhi, 3)
       del data_dwnhi

       # create memory mapped file for Z
       shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', Zt)

       dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', shape_hi)

    if 'dwnhi_fp' in locals() and 'dwnlow_fp' in locals():
       # check that low and high are same size
       # and trim if not
       if (np.shape(dwnhi_fp)!=np.shape(dwnlow_fp)) and (chunkmode!=4):
          print "dwnhi and dwnlow are different sizes ... rectifying"
          if np.shape(dwnhi_fp[0])[1] > np.shape(dwnlow_fp[0])[1]:
             tmp = dwnhi_fp.copy()
             tmp2 = np.empty_like(dwnlow_fp)
             for k in xrange(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(dwnlow_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_low = io.set_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', tmp2)
             #shape_hi = shape_low.copy()
             shape_hi = tuple(np.asarray(shape_low).copy())

             ##we are only going to access the portion of memory required
             dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', shape_hi)

             ind_hi = list(ind_hi)
             ind_hi[-1] = np.shape(dwnlow_fp[0])[1]
             ind_hi = tuple(ind_hi)

          elif np.shape(dwnhi_fp[0])[1] < np.shape(dwnlow_fp[0])[1]:
             tmp = dwnlow_fp.copy()
             tmp2 = np.empty_like(dwnhi_fp)
             for k in xrange(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(dwnhi_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', tmp2)
             #shape_hi = shape_low.copy()
             shape_hi = tuple(np.asarray(shape_low).copy())

             ##we are only going to access the portion of memory required
             dwnlow_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', shape_low)

             ind_low = list(ind_low)
             ind_low[-1] = np.shape(dwnhi_fp[0])[1]
             ind_low = tuple(ind_low)

    if old_pyread == 1: #older pyread version
       del data

    if ('shape_port' in locals()) and (chunkmode!=4):
       metadat['shape_port'] = shape_port
       nrec = metadat['shape_port'][0] * metadat['shape_port'][2]
    elif ('shape_port' in locals()) and (chunkmode==4):
       metadat['shape_port'] = shape_port
       nrec = metadat['shape_port'][1]
    else:
       metadat['shape_port'] = ''

    if ('shape_star' in locals()) and (chunkmode!=4):
       metadat['shape_star'] = shape_star
       nrec = metadat['shape_star'][0] * metadat['shape_star'][2]
    elif ('shape_star' in locals()) and (chunkmode==4):
       metadat['shape_star'] = shape_star
       nrec = metadat['shape_star'][1]
    else:
       metadat['shape_star'] = ''

    if ('shape_hi' in locals()) and (chunkmode!=4):
       metadat['shape_hi'] = shape_hi
       #nrec = metadat['shape_hi'][0] * metadat['shape_hi'][2] * 2
    elif ('shape_hi' in locals()) and (chunkmode==4):
       metadat['shape_hi'] = shape_hi
    else:
       metadat['shape_hi'] = ''

    if ('shape_low' in locals()) and (chunkmode!=4):
       metadat['shape_low'] = shape_low
       #nrec = metadat['shape_low'][0] * metadat['shape_low'][2] * 2
    elif ('shape_low' in locals()) and (chunkmode==4):
       metadat['shape_low'] = shape_low
    else:
       metadat['shape_low'] = ''

    #make kml boat trackline
    humutils.make_trackline(lon,lat, sonpath, base)

    if 'port_fp' in locals() and 'star_fp' in locals():

       #if not os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'meta.mat'))):
       if 2>1:
          if bedpick == 1: # auto

             x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp, c)

             if len(dist_m)<len(bed):
                dist_m = np.append(dist_m,dist_m[-1]*np.ones(len(bed)-len(dist_m)))

             if doplot==1:
                if chunkmode!=4:
                   for k in xrange(len(star_fp)):
                      plot_2bedpicks(port_fp[k], star_fp[k], bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], x[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
                else:
                   plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft, shape_port, sonpath, 0, chunkmode)

             # 'real' bed is estimated to be the minimum of the two
             bed = np.min(np.vstack((bed[:nrec],np.squeeze(x[:nrec]))),axis=0)
             bed = humutils.runningMeanFast(bed, 3)

          elif bedpick>1: # user prompt

             x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp, c)

             if len(dist_m)<len(bed):
                dist_m = np.append(dist_m,dist_m[-1]*np.ones(len(bed)-len(dist_m)))

             # 'real' bed is estimated to be the minimum of the two
             bed = np.min(np.vstack((bed[:nrec],np.squeeze(x[:nrec]))),axis=0)
             bed = humutils.runningMeanFast(bed, 3)

             # manually intervene
             fig = plt.figure()
             ax = plt.gca()
             if chunkmode !=4:
                im = ax.imshow(np.hstack(port_fp), cmap = 'gray', origin = 'upper')
             else:
                im = ax.imshow(port_fp, cmap = 'gray', origin = 'upper')
             plt.plot(bed,'r')
             plt.axis('normal'); plt.axis('tight')

             pts1 = plt.ginput(n=300, timeout=30) # wait for up to 300 clicks or 30 seconds
             x1=map(lambda x: x[0],pts1) # x coordinates of the clicked points
             y1=map(lambda x: x[1],pts1) # y coordinates of the clicked points
             plt.close()
             del fig

             if x1 != []: # if x1 is not empty
                tree = KDTree(zip(np.arange(1,len(bed)), bed))
                try:
                   dist, inds = tree.query(zip(x1, y1), k = 100, eps=5, n_jobs=-1)
                except:
                   dist, inds = tree.query(zip(x1, y1), k = 100, eps=5)

                b = np.interp(inds,x1,y1)
                bed2 = bed.copy()
                bed2[inds] = b
                bed = bed2

             if doplot==1:
                if chunkmode!=4:
                   for k in xrange(len(star_fp)):
                      plot_2bedpicks(port_fp[k], star_fp[k], bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], x[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
                else:
                   plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft, shape_port, sonpath, 0, chunkmode)

          else: #manual

             beds=[]

             if chunkmode!=4:
                for k in xrange(len(port_fp)):
                   raw_input("Bed picking "+str(k+1)+" of "+str(len(port_fp))+", are you ready? 30 seconds. Press Enter to continue...")
                   bed={}
                   fig = plt.figure()
                   ax = plt.gca()
                   im = ax.imshow(port_fp[k], cmap = 'gray', origin = 'upper')
                   pts1 = plt.ginput(n=300, timeout=30) # wait for up to 300 clicks or 30 seconds
                   x1=map(lambda x: x[0],pts1) # x coordinates of the clicked points
                   y1=map(lambda x: x[1],pts1) # y coordinates of the clicked points
                   bed = np.interp(np.r_[:ind_port[-1]],x1,y1)
                   plt.close()
                   del fig
                   beds.append(bed)
                   extent = np.shape(port_fp[k])[0]
                bed = np.asarray(np.hstack(beds),'float')
             else:
                raw_input("Bed picking - are you ready? 30 seconds. Press Enter to continue...")
                bed={}
                fig = plt.figure()
                ax = plt.gca()
                im = ax.imshow(port_fp, cmap = 'gray', origin = 'upper')
                pts1 = plt.ginput(n=300, timeout=30) # wait for up to 300 clicks or 30 seconds
                x1=map(lambda x: x[0],pts1) # x coordinates of the clicked points
                y1=map(lambda x: x[1],pts1) # y coordinates of the clicked points
                bed = np.interp(np.r_[:ind_port[-1]],x1,y1)
                plt.close()
                del fig
                beds.append(bed)
                extent = np.shape(port_fp)[1]
                bed = np.asarray(np.hstack(beds),'float')

          # now revise the depth in metres
          dep_m = (1/ft)*bed

          if doplot==1:
             if chunkmode!=4:
                for k in xrange(len(star_fp)):
                   plot_bedpick(port_fp[k], star_fp[k], (1/ft)*bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
             else:
                plot_bedpick(port_fp, star_fp, (1/ft)*bed, dist_m, ft, shape_port, sonpath, 0, chunkmode)

          metadat['bed'] = bed[:nrec]

    else:
       metadat['bed'] = dep_m[:nrec]*ft

    metadat['heading'] = metadat['heading'][:nrec]
    metadat['lon'] = lon[:nrec]
    metadat['lat'] = lat[:nrec]
    metadat['dist_m'] = dist_m[:nrec]
    metadat['dep_m'] = dep_m[:nrec]
    metadat['pix_m'] = 1/ft
    metadat['bed'] = metadat['bed'][:nrec]
    metadat['c'] = c
    metadat['t'] = t
    metadat['f'] = f

    metadat['spd'] = metadat['spd'][:nrec]
    metadat['time_s'] = metadat['time_s'][:nrec]
    metadat['e'] = metadat['e'][:nrec]
    metadat['n'] = metadat['n'][:nrec]
    metadat['es'] = metadat['es'][:nrec]
    metadat['ns'] = metadat['ns'][:nrec]
    metadat['caltime'] = metadat['caltime'][:nrec]

    savemat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')), metadat ,oned_as='row')

    f = open(os.path.normpath(os.path.join(sonpath,base+'rawdat.csv')), 'wt')
    writer = csv.writer(f)
    writer.writerow( ('longitude', 'latitude', 'easting', 'northing', 'depth (m)', 'distance (m)', 'instr. heading (deg)', 'heading (deg.)' ) )
    for i in range(0, nrec):
       writer.writerow(( float(lon[i]),float(lat[i]),float(es[i]),float(ns[i]),float(dep_m[i]),float(dist_m[i]), float(metadat['instr_heading'][i]), float(metadat['heading'][i]) ))
    f.close()

    del lat, lon, dep_m #, dist_m

    if doplot==1:

       plot_pos(sonpath, metadat, es, ns)

       if 'dwnlow_fp' in locals():

          plot_dwnlow(dwnlow_fp, chunkmode, sonpath)

       if 'dwnhi_fp' in locals():

          plot_dwnhi(dwnhi_fp, chunkmode, sonpath)

    if os.name=='posix': # true if linux/mac
       elapsed = (time.time() - start)
    else: # windows
       elapsed = (time.clock() - start)
    print "Processing took ", elapsed , "seconds to analyse"

    print "Done!"
Example #4
def correct(humfile,
            sonpath,
            maxW=1000,
            doplot=1,
            dofilt=0,
            correct_withwater=0,
            ph=7,
            temp=10,
            salinity=0,
            dconcfile=None):
    '''
    Remove water column and carry out some rudimentary radiometric corrections, 
    accounting for directivity and attenuation with range

    Syntax
    ----------
    [] = PyHum.correct(humfile, sonpath, maxW, doplot, dofilt, correct_withwater, ph, temp, salinity, dconcfile)

    Parameters
    ----------
    humfile : str
       path to the .DAT file

    sonpath : str
       path where the *.SON files are

    maxW : int, *optional* [Default=1000]
       maximum transducer power

    doplot : int, *optional* [Default=1]
       1 = make plots, otherwise do not

    dofilt : int, *optional* [Default=0]
       1 = apply a phase preserving filter to the scans

    correct_withwater : int, *optional* [Default=0]
       1 = apply radiometric correction but don't remove water column from scans

    ph : float, *optional* [Default=7.0]
       water acidity in pH

    temp : float, *optional* [Default=10.0]
       water temperature in degrees Celsius

    salinity : float, *optional* [Default=0.0]
       salinity of water in parts per thousand

    dconcfile : str, *optional* [Default=None]
       file path of a text file containing sediment concentration data
       this file must contain the following fields separated by spaces:
       size (microns) conc (mg/L) dens (kg/m3)
       with one row per grain size, for example:
       30 1700 2200
       100 15 2650

    Returns
    -------
    sonpath+base+'_data_star_l.dat': memory-mapped file
        contains the starboard scan with water column removed

    sonpath+base+'_data_port_l.dat': memory-mapped file
        contains the portside scan with water column removed

    sonpath+base+'_data_star_la.dat': memory-mapped file
        contains the starboard scan with water column removed and 
        radiometrically corrected

    sonpath+base+'_data_port_la.dat': memory-mapped file
        contains the portside scan with water column removed and
        radiometrically corrected

    sonpath+base+'_data_range.dat': memory-mapped file
        contains the range, which is used to correct
        for attenuation with range

    sonpath+base+'_data_dwnlow_l.dat': memory-mapped file
        contains the low freq. downward scan with water column removed

    sonpath+base+'_data_dwnhi_l.dat': memory-mapped file
        contains the high freq. downward  scan with water column removed

    sonpath+base+'_data_dwnlow_la.dat': memory-mapped file
        contains the low freq. downward  scan with water column removed and 
        radiometrically corrected

    sonpath+base+'_data_dwnhi_la.dat': memory-mapped file
        contains the high freq. downward  scan with water column removed and
        radiometrically corrected
    
    if correct_withwater == 1:
    
       sonpath+base+'_data_star_lw.dat': memory-mapped file
           contains the starboard scan with water column retained and 
           radiometrically corrected

       sonpath+base+'_data_port_lw.dat': memory-mapped file
           contains the portside scan with water column retained and
           radiometrically corrected

    '''
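    # A minimal usage sketch following the Syntax above (hypothetical paths;
    # the remaining arguments are the documented defaults):
    #
    #   PyHum.correct('/path/to/R00001.DAT', '/path/to/sonfiles',
    #                 maxW=1000, doplot=1, dofilt=0)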

    # prompt user to supply file if no input file given
    if not humfile:
        print 'An input file is required!!!!!!'
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print 'A *.SON directory is required!!!!!!'
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print 'Input file is %s' % (humfile)

    if sonpath:
        print 'Sonar file path is %s' % (sonpath)

    if maxW:
        maxW = np.asarray(maxW, float)
        print 'Max. transducer power is %s W' % (str(maxW))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print "Plots will not be made"

    if dofilt:
        dofilt = int(dofilt)
        if dofilt == 0:
            print "Phase preserving filter will not be applied"
        else:
            print "Phase preserving filter will be applied"

    if correct_withwater:
        correct_withwater = int(correct_withwater)
        if correct_withwater == 1:
            print "Correction will be applied without removing water column"

    if salinity:
        salinity = np.asarray(salinity, float)
        print 'Salinity is %s ppt' % (str(salinity))

    if ph:
        ph = np.asarray(ph, float)
        print 'pH is %s' % (str(ph))

    if temp:
        temp = np.asarray(temp, float)
        print 'Temperature is %s' % (str(temp))

    if dconcfile is not None:
        try:
            print 'Suspended sediment size/conc. file is %s' % (dconcfile)
            dconc = np.genfromtxt(dconcfile).T
            conc = dconc[1]
            dens = dconc[2]
            d = dconc[0]
        except:
            pass

    #================================
    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    # add wattage to metadata dict
    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    dep_m = meta['dep_m'][0]
    pix_m = meta['pix_m'][0]

    meta['maxW'] = maxW
    savemat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')),
            meta,
            oned_as='row')

    bed = np.squeeze(meta['bed'])
    ft = 1 / (meta['pix_m'])
    dist_m = np.squeeze(meta['dist_m'])

    try:
        if dconcfile is not None:
            # sediment attenuation
            alpha = sed_atten(meta['f'], conc, dens, d, meta['c'])
        else:
            alpha = 0
    except:
        alpha = 0

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port2.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat',
                                       'int16', tuple(shape_port))

        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat',
                                       'int16', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star2.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat',
                                       'int16', tuple(shape_star))

        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat',
                                       'int16', tuple(shape_star))

    if len(shape_star) == 2:
        extent = shape_star[0]
    else:
        extent = shape_star[1]  #np.shape(data_port)[0]

    # move the bed pick down by 0.25 m (converted to pixels via ft = pixels per metre)
    bed = np.asarray(bed, 'int') + int(0.25 * ft)

    # calculate in dB
    ######### star
    Zt, R, A = remove_water(star_fp, bed, shape_star, dep_m, pix_m, 1, maxW)

    Zt = np.squeeze(Zt)

    # create memory mapped file for Z)
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_l.dat', 'float32',
                                  Zt)
    del Zt

    A = np.squeeze(A)
    # create memory mapped file for A
    shape_A = io.set_mmap_data(sonpath, base, '_data_incidentangle.dat',
                               'float32', A)
    del A

    R = np.squeeze(R)
    R[np.isnan(R)] = 0

    try:
        alpha_w = water_atten(R, meta['f'], meta['c'], ph, temp, salinity)
    except:
        alpha_w = 1e-5

    # compute transmission losses: two-way spherical spreading (40*log10(R)) plus
    # water (alpha_w) and sediment ((2*alpha)*R/1000) attenuation, normalized by 255
    TL = (40 * np.log10(R) + alpha_w + (2 * alpha) * R / 1000) / 255
    del alpha_w

    # create memory mapped file for R
    shape_R = io.set_mmap_data(sonpath, base, '_data_range.dat', 'float32', R)
    del R

    TL[np.isnan(TL)] = 0
    TL[TL < 0] = 0
    shape_TL = io.set_mmap_data(sonpath, base, '_data_TL.dat', 'float32', TL)
    del TL

    A_fp = io.get_mmap_data(sonpath, base, '_data_incidentangle.dat',
                            'float32', shape_star)
    TL_fp = io.get_mmap_data(sonpath, base, '_data_TL.dat', 'float32',
                             shape_star)

    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            shape_star)

    if correct_withwater == 1:
        Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)

        # create memory mapped file for Z)
        shape_star = io.set_mmap_data(sonpath, base, '_data_star_lw.dat',
                                      'float32', Zt)

    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32',
                               shape_star)

    ##Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)

    #phi=1.69
    alpha = 59  # vertical beam width at 3db
    theta = 35  #opening angle theta

    # lambertian correction
    Zt = correct_scans_lambertian(star_fp, A_fp, TL_fp, R_fp, meta['c'],
                                  meta['f'], theta, alpha)

    Zt = np.squeeze(Zt)

    avg = np.nanmedian(Zt, axis=1)

    Zt2 = np.empty(np.shape(Zt))

    for kk in xrange(np.shape(Zt)[1]):
        Zt2[:, kk] = (Zt[:, kk] - avg) + np.nanmean(avg)
    Zt2[Zt <= 0] = np.nan
    Zt2[Zt2 <= 0] = np.nan
    del Zt

    # create memory mapped file for Z
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_la.dat',
                                  'float32', Zt2)
    del Zt2

    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32',
                               shape_star)

    ######### port
    if correct_withwater == 1:
        Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)

        # create memory mapped file for Z)
        shape_port = io.set_mmap_data(sonpath, base, '_data_port_lw.dat',
                                      'float32', Zt)

    Zt = remove_water(port_fp, bed, shape_port, dep_m, pix_m, 0, maxW)

    Zt = np.squeeze(Zt)

    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_l.dat', 'float32',
                                  Zt)

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32',
                               shape_port)

    ##Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)

    # lambertian correction
    Zt = correct_scans_lambertian(port_fp, A_fp, TL_fp, R_fp, meta['c'],
                                  meta['f'], theta, alpha)

    Zt = np.squeeze(Zt)

    Zt2 = np.empty(np.shape(Zt))

    for kk in xrange(np.shape(Zt)[1]):
        Zt2[:, kk] = (Zt[:, kk] - avg) + np.nanmean(avg)
    Zt2[Zt <= 0] = np.nan
    Zt2[Zt2 <= 0] = np.nan
    del Zt

    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_la.dat',
                                  'float32', Zt2)
    del Zt2

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32',
                               shape_port)

    ## do plots of merged scans
    if doplot == 1:
        if correct_withwater == 1:

            port_fpw = io.get_mmap_data(sonpath, base, '_data_port_lw.dat',
                                        'float32', shape_port)

            star_fpw = io.get_mmap_data(sonpath, base, '_data_star_lw.dat',
                                        'float32', shape_star)

            if len(np.shape(star_fpw)) > 2:
                for p in xrange(len(star_fpw)):
                    plot_merged_scans(port_fpw[p], star_fpw[p], dist_m,
                                      shape_port, ft, sonpath, p)
            else:
                plot_merged_scans(port_fpw, star_fpw, dist_m, shape_port, ft,
                                  sonpath, 0)

        else:

            if len(np.shape(star_fp)) > 2:
                for p in xrange(len(star_fp)):
                    plot_merged_scans(port_fp[p], star_fp[p], dist_m,
                                      shape_port, ft, sonpath, p)
            else:
                plot_merged_scans(port_fp, star_fp, dist_m, shape_port, ft,
                                  sonpath, 0)

    # load memory mapped scans
    shape_low = np.squeeze(meta['shape_low'])
    shape_hi = np.squeeze(meta['shape_hi'])

    if shape_low != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_dwnlow2.dat'))):
            try:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat',
                                          'int16', tuple(shape_low))

            except:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                          'int16', tuple(shape_low))

            finally:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                          'int16', tuple(shape_hi))

                #if 'shape_hi' in locals():
                #   low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', tuple(shape_hi))

        else:

            try:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                          'int16', tuple(shape_low))

            except:
                if 'shape_hi' in locals():
                    low_fp = io.get_mmap_data(sonpath, base,
                                              '_data_dwnlow.dat', 'int16',
                                              tuple(shape_hi))

    shape_hi = np.squeeze(meta['shape_hi'])

    if shape_hi != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_dwnhi2.dat'))):
            try:
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat',
                                         'int16', tuple(shape_hi))

            except:
                try:
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_hi))
                except:
                    # last resort: the unresampled file read with the low-freq. shape
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_low))

        else:
            try:
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                         'int16', tuple(shape_hi))

            except:
                if 'shape_low' in locals():
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_low))

    if 'low_fp' in locals():
        ######### low
        Zt = remove_water(low_fp, bed, shape_low, dep_m, pix_m, 0, maxW)
        Zt = np.squeeze(Zt)

        # create memory mapped file for Z
        shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_l.dat',
                                     'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_l.dat',
                                  'float32', shape_low)
        Zt = correct_scans2(low_fp, TL_fp)

        # create memory mapped file for Z
        shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_la.dat',
                                     'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_la.dat',
                                  'float32', shape_low)

        if doplot == 1:
            if len(np.shape(low_fp)) > 2:
                for p in range(len(low_fp)):
                    plot_dwnlow_scans(low_fp[p], dist_m, shape_low, ft,
                                      sonpath, p)
            else:
                plot_dwnlow_scans(low_fp, dist_m, shape_low, ft, sonpath, 0)

    if 'hi_fp' in locals():
        ######### hi
        Zt = remove_water(hi_fp, bed, shape_hi, dep_m, pix_m, 0, maxW)
        Zt = np.squeeze(Zt)

        # create memory mapped file for Z
        shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_l.dat',
                                    'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_l.dat', 'float32',
                                 shape_hi)

        Zt = correct_scans2(hi_fp, TL_fp)

        # create memory mapped file for Z
        shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_la.dat',
                                    'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_la.dat',
                                 'float32', shape_hi)

        if doplot == 1:
            if len(np.shape(hi_fp)) > 2:
                for p in range(len(hi_fp)):
                    plot_dwnhi_scans(hi_fp[p], dist_m, shape_hi, ft, sonpath,
                                     p)
            else:
                plot_dwnhi_scans(hi_fp, dist_m, shape_hi, ft, sonpath, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print "Processing took ", elapsed, "seconds to analyse"

    print "Done!"
Example No. 5
0
def sliding_window(a,ws,ss = None,flatten = True):
   '''
   Return a sliding window over a in any number of dimensions
   '''
   if None is ss:
      # ss was not provided. the windows will not overlap in any direction.
      ss = ws
   ws = norm_shape(ws)
   ss = norm_shape(ss)
   # convert ws, ss, and a.shape to numpy arrays
   ws = np.array(ws)
   ss = np.array(ss)

   import PyHum.io as io
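   # round-trip a through a temporary memory-mapped file so the windowing below operates on a
   # disk-backed array rather than holding everything in RAM; the temporary file is removed
   # straight away where the operating system allows it (the mapping itself stays usable)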

   shape_tmp = io.set_mmap_data('', '', 'tmp.dat', 'float32', a)
   del a
   a = io.get_mmap_data('', '', 'tmp.dat', 'float32', shape_tmp)

   shap = np.array(a.shape)

   try:
      os.remove('tmp.dat')
   except:
      pass

   # ensure that ws, ss, and a.shape all have the same number of dimensions
   ls = [len(shap),len(ws),len(ss)]
   if 1 != len(set(ls)):
      raise ValueError(\
      'a.shape, ws and ss must all have the same length. They were %s' % str(ls))

   # ensure that ws is smaller than a in every dimension
   if np.any(ws > shap):
      raise ValueError(\
      'ws cannot be larger than a in any dimension.\
 a.shape was %s and ws was %s' % (str(a.shape),str(ws)))

   # how many slices will there be in each dimension?
   newshape = norm_shape(((shap - ws) // ss) + 1)
   # the shape of the strided array will be the number of slices in each dimension
   # plus the shape of the window (tuple addition)
   newshape += norm_shape(ws)
   # the strides tuple will be the array's strides multiplied by step size, plus

   try:
      # the array's strides (tuple addition)
      newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
      a = ast(a,shape = newshape,strides = newstrides)
      if not flatten:
         return a
      # Collapse strided so that it has one more dimension than the window.  I.e.,
      # the new array is a flat list of slices.
      meat = len(ws) if ws.shape else 0
      firstdim = (int(np.prod(newshape[:-meat])),) if ws.shape else ()
      dim = firstdim + (newshape[-meat:])
      # remove any dimensions with size 1
      dim = [i for i in dim if i != 1]

      return a.reshape(dim), newshape

   except:

      from itertools import product
      print("memory error, windowing using slower method")
      # For each dimension, create a list of all valid slices
      slices = [[] for i in range(len(ws))]
      for i in range(len(ws)):
         nslices = ((shap[i] - ws[i]) // ss[i]) + 1
         for j in range(0,nslices):
            start = j * ss[i]
            stop = start + ws[i]
            slices[i].append(slice(start,stop))
      # Get an iterator over all valid n-dimensional slices of the input
      allslices = product(*slices)

      # Allocate memory to hold all valid n-dimensional slices
      nslices = np.prod([len(s) for s in slices])
      #out = np.ndarray((nslices,) + tuple(ws),dtype = a.dtype)
      out=[]
      for i,s in enumerate(allslices):
         #out[i] = a[s]
         out.append(a[s])

      del a
      import dask.bag as db
      tmp = db.from_sequence(out, npartitions=1000)
      del out

      return tmp.compute(), newshape
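For orientation, the strided fast path above is the standard stride-tricks windowing idea. Below is a minimal, self-contained 2-D sketch of the same operation using numpy.lib.stride_tricks.as_strided directly; simple_sliding_window is a hypothetical helper written for illustration only (it is not part of PyHum, handles only 2-D input, and assumes ws and ss are single integers):

import numpy as np
from numpy.lib.stride_tricks import as_strided

def simple_sliding_window(a, ws, ss):
    # number of window positions along each axis
    nrows = (a.shape[0] - ws) // ss + 1
    ncols = (a.shape[1] - ws) // ss + 1
    # build a (nrows, ncols, ws, ws) view from strides, as the fast path above does in N-D
    windows = as_strided(a,
                         shape=(nrows, ncols, ws, ws),
                         strides=(a.strides[0] * ss, a.strides[1] * ss) + a.strides)
    # flatten the grid of windows into a stack of square tiles (this step copies)
    return windows.reshape(-1, ws, ws)

a = np.arange(100, dtype='float32').reshape(10, 10)
tiles = simple_sliding_window(a, ws=5, ss=5)
print(tiles.shape)  # (4, 5, 5): four non-overlapping 5x5 windows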
Example No. 6
0
def correct(humfile, sonpath, maxW, doplot, dofilt, correct_withwater, ph, temp, salinity, dconcfile):

    '''
    Remove water column and carry out some rudimentary radiometric corrections, 
    accounting for directivity and attenuation with range

    Syntax
    ----------
    [] = PyHum.correct(humfile, sonpath, maxW, doplot, dofilt, correct_withwater, ph, temp, salinity, dconcfile)

    Parameters
    ----------
    humfile : str
       path to the .DAT file

    sonpath : str
       path where the *.SON files are

    maxW : int, *optional* [Default=1000]
       maximum transducer power

    doplot : int, *optional* [Default=1]
       1 = make plots, otherwise do not

    dofilt : int, *optional* [Default=0]
       1 = apply a phase preserving filter to the scans

    correct_withwater : int, *optional* [Default=0]
       1 = apply radiometric correction but don't remove water column from scans

    ph : float, *optional* [Default=7.0]
       water acidity in pH

    temp : float, *optional* [Default=10.0]
       water temperature in degrees Celsius

    salinity : float, *optional* [Default=0.0]
       salinity of water in parts per thousand

    dconcfile : str, *optional* [Default=None]
       file path of a text file containing sediment concentration data
       this file must contain the following fields separated by spaces:
       size (microns) conc (mg/L) dens (kg/m3)
       with one row per grain size, for example:
       30 1700 2200
       100 15 2650

    Returns
    -------
    sonpath+base+'_data_star_l.dat': memory-mapped file
        contains the starboard scan with water column removed

    sonpath+base+'_data_port_l.dat': memory-mapped file
        contains the portside scan with water column removed

    sonpath+base+'_data_star_la.dat': memory-mapped file
        contains the starboard scan with water column removed and 
        radiometrically corrected

    sonpath+base+'_data_port_la.dat': memory-mapped file
        contains the portside scan with water column removed and
        radiometrically corrected

    sonpath+base+'_data_range.dat': memory-mapped file
        contains the cosine of the range which is used to correct
        for attenuation with range

    sonpath+base+'_data_dwnlow_l.dat': memory-mapped file
        contains the low freq. downward scan with water column removed

     sonpath+base+'_data_dwnhi_l.dat': memory-mapped file
        contains the high freq. downward scan with water column removed

     sonpath+base+'_data_dwnlow_la.dat': memory-mapped file
        contains the low freq. downward scan with water column removed and
        radiometrically corrected

     sonpath+base+'_data_dwnhi_la.dat': memory-mapped file
        contains the high freq. downward scan with water column removed and
        radiometrically corrected
    
    if correct_withwater == 1:
    
       sonpath+base+'_data_star_lw.dat': memory-mapped file
           contains the starboard scan with water column retained and 
           radiometrically corrected

       sonpath+base+'_data_port_lw.dat': memory-mapped file
           contains the portside scan with water column retained and
           radiometrically corrected

    '''

    # prompt user to supply file if no input file given
    if not humfile:
      print('An input file is required!!!!!!')
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      humfile = askopenfilename(filetypes=[("DAT files","*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
      print('A *.SON directory is required!!!!!!')
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      sonpath = askdirectory() 

    # print given arguments to screen and convert data type where necessary
    if humfile:
      print('Input file is %s' % (humfile))

    if sonpath:
      print('Sonar file path is %s' % (sonpath))

    if maxW:
      maxW = np.asarray(maxW,float)
      print('Max. transducer power is %s W' % (str(maxW)))

    if doplot:
      doplot = int(doplot)
      if doplot==0:
         print("Plots will not be made")

    if dofilt:
      dofilt = int(dofilt)
      if dofilt==0:
         print("Phase preserving filter will not be applied")
      else:
         print("Phase preserving filter will be applied")

    if correct_withwater:
      correct_withwater = int(correct_withwater)
      if correct_withwater==1:
         print("Correction will be applied without removing water column")

    if salinity:
       salinity = np.asarray(salinity,float)
       print('Salinity is %s ppt' % (str(salinity)))

    if ph:
       ph = np.asarray(ph,float)
       print('pH is %s' % (str(ph)))

    if temp:
       temp = np.asarray(temp,float)
       print('Temperature is %s' % (str(temp)))

    if dconcfile is not None:
       try:
          print('Suspended sediment size/conc. file is %s' % (dconcfile))
          dconc = np.genfromtxt(dconcfile).T
          conc = dconc[1]
          dens = dconc[2]
          d = dconc[0]
       except:
          pass

    #================================
    # start timer
    if os.name=='posix': # true if linux/mac or cygwin on windows
       start = time.time()
    else: # windows
       start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1]!=os.sep:
       sonpath = sonpath + os.sep

    base = humfile.split('.DAT') # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)   

    # add wattage to metadata dict 
    meta = loadmat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')))

    dep_m = meta['dep_m'][0]
    pix_m = meta['pix_m'][0]

    meta['maxW'] = maxW
    savemat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')), meta ,oned_as='row')

    bed = np.squeeze(meta['bed'])
    ft = 1/(meta['pix_m'])
    dist_m = np.squeeze(meta['dist_m'])

    try:
       if dconcfile is not None:
          # sediment attenuation
          alpha = sed_atten(meta['f'],conc,dens,d,meta['c'])
       else:
          alpha = 0
    except:
       alpha = 0

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port!='':
       
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_port2.dat'))):
          port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat', 'int16', tuple(shape_port))

       else:
          port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat', 'int16', tuple(shape_port))       

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star!='':
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_star2.dat'))):
          star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat', 'int16', tuple(shape_star))       

       else:
          star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat', 'int16', tuple(shape_star))        

    if len(shape_star)==2:
       extent = shape_star[0] 
    else:
       extent = shape_star[1] #np.shape(data_port)[0]

    bed = np.asarray(bed,'int')+int(0.25*ft)

    # calculate in dB
    ######### star
    Zt, R, A = remove_water(star_fp, bed, shape_star, dep_m, pix_m, 1,  maxW)

    Zt = np.squeeze(Zt)
    
    # create memory mapped file for Z
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', Zt)    
    del Zt
    
    A = np.squeeze(A)
    # create memory mapped file for A
    shape_A = io.set_mmap_data(sonpath, base, '_data_incidentangle.dat', 'float32', A)         
    del A

    R = np.squeeze(R)
    R[np.isnan(R)] = 0

    try:
       alpha_w = water_atten(R,meta['f'],meta['c'], ph, temp, salinity)
    except:
       alpha_w = 1e-5

    # compute transmission losses
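    # 40*log10(R) is two-way spherical spreading, alpha_w the water-absorption term, and
    # (2*alpha)*R/1000 a two-way sediment-attenuation term; the division by 255 appears to
    # normalise TL against the 8-bit amplitude range of the recorded scans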
    TL = (40 * np.log10(R) + alpha_w + (2*alpha)*R/1000)/255
    del alpha_w

    # create memory mapped file for R
    shape_R = io.set_mmap_data(sonpath, base, '_data_range.dat', 'float32', R)  
    del R 
    
    TL[np.isnan(TL)] = 0
    TL[TL<0] = 0
    shape_TL = io.set_mmap_data(sonpath, base, '_data_TL.dat', 'float32', TL)     
    del TL      

    A_fp = io.get_mmap_data(sonpath, base, '_data_incidentangle.dat', 'float32', shape_star)
    TL_fp = io.get_mmap_data(sonpath, base, '_data_TL.dat', 'float32', shape_star)

    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', shape_star)
        
    if correct_withwater == 1:
       Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)

       # create memory mapped file for Z
       shape_star = io.set_mmap_data(sonpath, base, '_data_star_lw.dat', 'float32', Zt)       

    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', shape_star)     

    ##Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)
 
    #phi=1.69
    alpha=59 # vertical beam width at 3dB
    theta=35 # opening angle theta
    
    # lambertian correction
    Zt = correct_scans_lambertian(star_fp, A_fp, TL_fp, R_fp, meta['c'], meta['f'], theta, alpha)
    
    Zt = np.squeeze(Zt)

    avg = np.nanmedian(Zt,axis=0)
    ##avg = median_filter(avg,int(len(avg)/10))
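    # avg is the per-range-bin median over pings (an empirical beam/range pattern); subtracting it
    # flattens the scan, while the nanmean and nanmin terms below restore a positive overall level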
    
    Zt2 = Zt-avg + np.nanmean(avg)
    Zt2 = Zt2 + np.abs(np.nanmin(Zt2))

    try:
       Zt2 = median_filter(Zt2, (3,3))
    except:
       pass

    ##Zt2 = np.empty(np.shape(Zt)) 
    ##for kk in range(np.shape(Zt)[1]):
    ##   Zt2[:,kk] = (Zt[:,kk] - avg) + np.nanmean(avg)
    ##Zt2[Zt<=0] = np.nan
    ##Zt2[Zt2<=0] = np.nan    
    del Zt
    
    # create memory mapped file for Z
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', Zt2)
    del Zt2    
    
    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', shape_star) 

    ######### port
    if correct_withwater == 1:
       Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)

       # create memory mapped file for Z
       shape_port = io.set_mmap_data(sonpath, base, '_data_port_lw.dat', 'float32', Zt)        

    Zt = remove_water(port_fp, bed, shape_port, dep_m, pix_m, 0,  maxW)

    Zt = np.squeeze(Zt)
    
    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', Zt)       

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', shape_port)     
    
    ##Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)
    
    # lambertian correction
    Zt = correct_scans_lambertian(port_fp, A_fp, TL_fp, R_fp, meta['c'], meta['f'], theta, alpha)
    
    Zt = np.squeeze(Zt)
    
    Zt2 = Zt-avg + np.nanmean(avg)
    Zt2 = Zt2 + np.abs(np.nanmin(Zt2))

    ##Zt2 = np.empty(np.shape(Zt))
    ##for kk in range(np.shape(Zt)[1]):
    ##   Zt2[:,kk] = (Zt[:,kk] - avg) + np.nanmean(avg)
    ##Zt2[Zt<=0] = np.nan
    ##Zt2[Zt2<=0] = np.nan    
    del Zt
        
    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', Zt2)       
    del Zt2

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', shape_port) 

    ## do plots of merged scans
    if doplot==1:
       if correct_withwater == 1:

          port_fpw = io.get_mmap_data(sonpath, base, '_data_port_lw.dat', 'float32', shape_port) 

          star_fpw = io.get_mmap_data(sonpath, base, '_data_star_lw.dat', 'float32', shape_star) 
          
          if len(np.shape(star_fpw))>2:
             for p in range(len(star_fpw)):
                plot_merged_scans(port_fpw[p], star_fpw[p], dist_m, shape_port, ft, sonpath, p)
          else:
             plot_merged_scans(port_fpw, star_fpw, dist_m, shape_port, ft, sonpath, 0)

       else:

          if len(np.shape(star_fp))>2:
             for p in range(len(star_fp)):
                plot_merged_scans(port_fp[p], star_fp[p], dist_m, shape_port, ft, sonpath, p)
          else:
             plot_merged_scans(port_fp, star_fp, dist_m, shape_port, ft, sonpath, 0)


    # load memory mapped scans
    shape_low = np.squeeze(meta['shape_low'])
    shape_hi = np.squeeze(meta['shape_hi'])
    
    if shape_low!='':
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_dwnlow2.dat'))):
          try:
             low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', tuple(shape_low))

          except:
             try:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', tuple(shape_low))
             except:
                # last resort: the unresampled file read with the high-freq. shape
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', tuple(shape_hi))

       else:

          try:
             low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', tuple(shape_low))           

          except:
             if 'shape_hi' in locals():
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', tuple(shape_hi))              

    shape_hi = np.squeeze(meta['shape_hi'])

    if shape_hi!='':
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_dwnhi2.dat'))):
          try:
             hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', tuple(shape_hi))

          except:
             try:
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', tuple(shape_hi))
             except:
                # last resort: the unresampled file read with the low-freq. shape
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', tuple(shape_low))

       else:
          try:
             hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', tuple(shape_hi))            

          except:
             if 'shape_low' in locals():
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', tuple(shape_low))               


    if 'low_fp' in locals():
       ######### low
       Zt = remove_water(low_fp, bed, shape_low, dep_m, pix_m, 0,  maxW)
       Zt = np.squeeze(Zt)

       # create memory mapped file for Z
       shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_l.dat', 'float32', Zt)       
       del Zt   

       #we are only going to access the portion of memory required
       low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_l.dat', 'float32', shape_low)         
       Zt = correct_scans2(low_fp, TL_fp)

       # create memory mapped file for Z
       shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_la.dat', 'float32', Zt)    
       del Zt    

       #we are only going to access the portion of memory required
       low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_la.dat', 'float32', shape_low)        
       
       if doplot==1:
          if len(np.shape(low_fp))>2:
             for p in range(len(low_fp)):
                plot_dwnlow_scans(low_fp[p], dist_m, shape_low, ft, sonpath, p)
          else:
             plot_dwnlow_scans(low_fp, dist_m, shape_low, ft, sonpath, 0)

    if 'hi_fp' in locals():
       ######### hi
       Zt = remove_water(hi_fp, bed, shape_hi, dep_m, pix_m, 0,  maxW)
       Zt = np.squeeze(Zt)

       # create memory mapped file for Z
       shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_l.dat', 'float32', Zt) 
       del Zt       

       #we are only going to access the portion of memory required
       hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_l.dat', 'float32', shape_hi)        

       Zt = correct_scans2(hi_fp, TL_fp)

       # create memory mapped file for Z
       shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_la.dat', 'float32', Zt)     
       del Zt  

       #we are only going to access the portion of memory required
       hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_la.dat', 'float32', shape_hi)        
       
       if doplot==1:
          if len(np.shape(hi_fp))>2:
             for p in range(len(hi_fp)):
                plot_dwnhi_scans(hi_fp[p], dist_m, shape_hi, ft, sonpath, p)
          else:
             plot_dwnhi_scans(hi_fp, dist_m, shape_hi, ft, sonpath, 0)

    if os.name=='posix': # true if linux/mac
       elapsed = (time.time() - start)
    else: # windows
       elapsed = (time.clock() - start)
    print("Processing took "+ str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Example No. 7
0
def read(humfile, sonpath, cs2cs_args, c, draft, doplot, t, bedpick, flip_lr, model, calc_bearing, filt_bearing, chunk): #cog = 1,

    '''
    Read a .DAT and associated set of .SON files recorded by a Humminbird(R)
    instrument.

    Parse the data into a set of memory mapped files that will
    subsequently be used by the other functions of the PyHum module.

    Export time-series data and metadata in other formats.

    Create a kml file for visualising boat track

    Syntax
    ----------
    [] = PyHum.read(humfile, sonpath, cs2cs_args, c, draft, doplot, t, bedpick, flip_lr, model, calc_bearing, filt_bearing, chunk)

    Parameters
    ------------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    c : float, *optional* [Default=1450.0]
       speed of sound in water (m/s). Defaults to a value of freshwater
    draft : float, *optional* [Default=0.3]
       draft from water surface to transducer face (m)
    doplot : float, *optional* [Default=1]
       if 1, plots will be made
    t : float, *optional* [Default=0.108]
       length of transducer array (m).
       Default value is that of the 998 series Humminbird(R)
    bedpick : int, *optional* [Default=1]
       if 1, bedpicking will be carried out automatically
       if 0, user will be prompted to pick the bed location on screen
    flip_lr : int, *optional* [Default=0]
       if 1, port and starboard scans will be flipped
       (for situations where the transducer is flipped 180 degrees)
    model: int, *optional* [Default=998]
       A 3 or 4 number code indicating the model number
       Examples: 998, 997, 1198, 1199
    calc_bearing : float, *optional* [Default=0]
       if 1, bearing will be calculated from coordinates
    filt_bearing : float, *optional* [Default=0]
       if 1, bearing will be filtered
    chunk : str, *optional* [Default='d100' (distance, 100 m)]
       a letter, followed by a number.
       The letter options are:
       'd' - parse chunks based on distance; the number is the distance in m
       'p' - parse chunks based on number of pings; the number is the number of pings
       'h' - parse chunks based on change in heading; the number is the change in heading in degrees
       '1' - process just 1 chunk

    Returns
    ---------
    sonpath+base+'_data_port.dat': memory-mapped file
        contains the raw echogram from the port side
        sidescan sonar (where present)

    sonpath+base+'_data_star.dat': memory-mapped file
        contains the raw echogram from the starboard side
        sidescan sonar (where present)

    sonpath+base+'_data_dwnhi.dat': memory-mapped file
        contains the raw echogram from the high-frequency
        echosounder (where present)

    sonpath+base+'_data_dwnlow.dat': memory-mapped file
        contains the raw echogram from the low-frequency
        echosounder (where present)

    sonpath+base+"trackline.kml": google-earth kml file
        contains the trackline of the vessel during data
        acquisition

    sonpath+base+'rawdat.csv': comma separated value file
        contains time-series data. columns corresponding to
        longitude
        latitude
        easting (m)
        northing (m)
        depth to bed (m)
        alongtrack cumulative distance (m)
        vessel heading (deg.)

    sonpath+base+'meta.mat': .mat file
        matlab format file containing a dictionary object
        holding metadata information. Fields are:
        e : ndarray, easting (m)
        n : ndarray, northing (m)
        es : ndarray, low-pass filtered easting (m)
        ns : ndarray, low-pass filtered northing (m)
        lat : ndarray, latitude
        lon : ndarray, longitude
        shape_port : tuple, shape of port scans in memory mapped file
        shape_star : tuple, shape of starboard scans in memory mapped file
        shape_hi : tuple, shape of high-freq. scans in memory mapped file
        shape_low : tuple, shape of low-freq. scans in memory mapped file
        dep_m : ndarray, depth to bed (m)
        dist_m : ndarray, distance along track (m)
        heading : ndarray, heading of vessel (deg. N)
        pix_m: float, size of 1 pixel in across-track dimension (m)
        bed : ndarray, depth to bed (m)
        c : float, speed of sound in water (m/s)
        t : length of sidescan transducer array (m)
        spd : ndarray, vessel speed (m/s)
        time_s : ndarray, time elapsed (s)
        caltime : ndarray, unix epoch time (s)
    '''

    # prompt user to supply file if no input file given
    if not humfile:
      print('An input file is required!!!!!!')
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      humfile = askopenfilename(filetypes=[("DAT files","*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
      print('A *.SON directory is required!!!!!!')
      Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
      sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
      print('Input file is %s' % (humfile))

    if sonpath:
      print('Son files are in %s' % (sonpath))

    if cs2cs_args:
      print('cs2cs arguments are %s' % (cs2cs_args))

    if draft:
      draft = float(draft)
      print('Draft: %s' % (str(draft)))

    if c:
      c = float(c)
      print('Celerity of sound: %s m/s' % (str(c)))

    if doplot:
      doplot = int(doplot)
      if doplot==0:
         print("Plots will not be made")

    if flip_lr:
      flip_lr = int(flip_lr)
      if flip_lr==1:
         print("Port and starboard will be flipped")

    if t:
      t = np.asarray(t,float)
      print('Transducer length is %s m' % (str(t)))

    if bedpick:
      bedpick = np.asarray(bedpick,int)
      if bedpick==1:
         print('Bed picking is auto')
      elif bedpick==0:
         print('Bed picking is manual')
      else:
         print('User will be prompted per chunk about bed picking method')

    if chunk:
       chunk = str(chunk)
       if chunk[0]=='d':
          chunkmode=1
          chunkval = int(chunk[1:])
          print('Chunks based on distance of %s m' % (str(chunkval)))
       elif chunk[0]=='p':
          chunkmode=2
          chunkval = int(chunk[1:])
          print('Chunks based on %s pings' % (str(chunkval)))
       elif chunk[0]=='h':
          chunkmode=3
          chunkval = int(chunk[1:])
          print('Chunks based on heading deviation of %s degrees' % (str(chunkval)))
       elif chunk[0]=='1':
          chunkmode=4
          chunkval = 1
          print('Only 1 chunk will be produced')
       else:
          print("Chunk mode not understood - should be 'd', 'p', or 'h' - using defaults")
          chunkmode=1
          chunkval = 100
          print('Chunks based on distance of %s m' % (str(chunkval)))

    if model:
       try:
          model = int(model)
          print("Data is from the %s series"  % (str(model)))
       except:
          if model=='onix':
             model=0
             print("Data is from the ONIX series")
          elif model=='helix':
             model=1
             print("Data is from the HELIX series")
          elif model=='mega':
             model=2
             print("Data is from the MEGA series")
#    if cog:
#       cog = int(cog)
#       if cog==1:
#          print "Heading based on course-over-ground"

    if calc_bearing:
       calc_bearing = int(calc_bearing)
       if calc_bearing==1:
          print("Bearing will be calculated from coordinates")

    if filt_bearing:
       filt_bearing = int(filt_bearing)
       if filt_bearing==1:
          print("Bearing will be filtered")

    ## for debugging
    #humfile = r"test.DAT"; sonpath = "test_data"
    #cs2cs_args = "epsg:26949"; doplot = 1; draft = 0
    #c=1450; bedpick=1; fliplr=1; chunk = 'd100'
    #model=998; cog=1; calc_bearing=0; filt_bearing=0

    #if model==2:
    #   f = 1000
    #else:
    f = 455
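    # sidescan frequency in kHz (455 kHz transducers); the value stored in the metadata
    # further down is doubled for 'mega' (model==2) units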

    try:
       print("Checking the epsg code you have chosen for compatibility with Basemap ... ")
       from mpl_toolkits.basemap import Basemap
       m = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1],
          resolution = 'i', llcrnrlon=10, llcrnrlat=10, urcrnrlon=30, urcrnrlat=30)
       del m
       print("... epsg code compatible")
    except (ValueError):
       print("Error: the epsg code you have chosen is not compatible with Basemap")
       print("please choose a different epsg code (http://spatialreference.org/)")
       print("program will now close")
       sys.exit()

    # start timer
    if os.name=='posix': # true if linux/mac or cygwin on windows
       start = time.time()
    else: # windows
       start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1]!=os.sep:
       sonpath = sonpath + os.sep

    # get the SON files from this directory
    sonfiles = glob.glob(sonpath+'*.SON')
    if not sonfiles:
        sonfiles = glob.glob(os.getcwd()+os.sep+sonpath+'*.SON')

    base = humfile.split('.DAT') # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    print("WARNING: Because files have to be read in byte by byte,")
    print("this could take a very long time ...")

    #reading each sonfile in parallel should be faster ...
    try:
       o = Parallel(n_jobs = np.min([len(sonfiles), cpu_count()]), verbose=0)(delayed(getscans)(sonfiles[k], humfile, c, model, cs2cs_args) for k in range(len(sonfiles)))
       X, Y, A, B = zip(*o)

       for k in range(len(Y)):
          if Y[k] == 'sidescan_port':
             dat = A[k] #data.gethumdat()
             metadat = B[k] #data.getmetadata()
             if flip_lr==0:
                data_port = X[k].astype('int16')
             else:
                data_star = X[k].astype('int16')

          elif Y[k] == 'sidescan_starboard':
             if flip_lr==0:
                data_star = X[k].astype('int16')
             else:
                data_port = X[k].astype('int16')

          elif Y[k] == 'down_lowfreq':
             data_dwnlow = X[k].astype('int16')

          elif Y[k] == 'down_highfreq':
             data_dwnhi = X[k].astype('int16')

          elif Y[k] == 'down_vhighfreq': #hopefully this only applies to mega systems
             data_dwnhi = X[k].astype('int16')

       del X, Y, A, B, o
       old_pyread = 0

       if 'data_port' not in locals():
          data_port = ''
          print("portside scan not available")

       if 'data_star' not in locals():
          data_star = ''
          print("starboardside scan not available")

       if 'data_dwnhi' not in locals():
          data_dwnhi = ''
          print("high-freq. downward scan not available")

       if 'data_dwnlow' not in locals():
          data_dwnlow = ''
          print("low-freq. downward scan not available")

    except: # revert back to older version if parallelised version fails

       print("something went wrong with the parallelised version of pyread ...")

       try:
          import pyread
       except:
          from . import pyread

       data = pyread.pyread(sonfiles, humfile, c, model, cs2cs_args)

       dat = data.gethumdat()

       metadat = data.getmetadata()

       old_pyread = 1

    nrec = len(metadat['n'])

    metadat['instr_heading'] = metadat['heading'][:nrec]

    #metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing, cog, metadat['lat'], metadat['lon'], metadat['instr_heading'])

    try:
       es = humutils.runningMeanFast(metadat['e'][:nrec],len(metadat['e'][:nrec])//100)
       ns = humutils.runningMeanFast(metadat['n'][:nrec],len(metadat['n'][:nrec])//100)
    except:
       es = metadat['e'][:nrec]
       ns = metadat['n'][:nrec]

    metadat['es'] = es
    metadat['ns'] = ns

    try:
       trans =  pyproj.Proj(init=cs2cs_args)
    except:
       trans =  pyproj.Proj(cs2cs_args.lstrip(), inverse=True)

    lon, lat = trans(es, ns, inverse=True)
    metadat['lon'] = lon
    metadat['lat'] = lat

    metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing, metadat['lat'], metadat['lon'], metadat['instr_heading']) #cog

    dist_m = humutils.get_dist(lat, lon)
    metadat['dist_m'] = dist_m

    if calc_bearing==1: # recalculate speed, m/s
       ds=np.gradient(np.squeeze(metadat['time_s']))
       dx=np.gradient(np.squeeze(metadat['dist_m']))
       metadat['spd'] = dx[:nrec]/ds[:nrec]

    # theta at 3dB in the horizontal
    theta3dB = np.arcsin(c/(t*(f*1000)))
    #resolution of 1 sidescan pixel to nadir
    ft = (np.pi/2)*(1/theta3dB) #/ (f/455)
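    # ft acts as the pixels-per-metre factor from here on: bed picks in pixels are dep_m*ft,
    # and the pix_m written to the metadata is 1/ft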

    dep_m = humutils.get_depth(metadat['dep_m'][:nrec])

    if old_pyread == 1: #older pyread version

       # port scan
       try:
          if flip_lr==0:
             data_port = data.getportscans().astype('int16')
          else:
             data_port = data.getstarscans().astype('int16')
       except:
          data_port = ''
          print("portside scan not available")

    if data_port!='':

       Zt, ind_port = makechunks_scan(chunkmode, chunkval, metadat, data_port, 0)
       del data_port

       ## create memory mapped file for Z
       shape_port = io.set_mmap_data(sonpath, base, '_data_port.dat', 'int16', Zt)

       ##we are only going to access the portion of memory required
       port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat', 'int16', shape_port)

    if old_pyread == 1: #older pyread version
       # starboard scan
       try:
          if flip_lr==0:
             data_star = data.getstarscans().astype('int16')
          else:
             data_star = data.getportscans().astype('int16')
       except:
          data_star = ''
          print("starboardside scan not available")

    if data_star!='':

       Zt, ind_star = makechunks_scan(chunkmode, chunkval, metadat, data_star, 1)
       del data_star

       # create memory mapped file for Z
       shape_star = io.set_mmap_data(sonpath, base, '_data_star.dat', 'int16', Zt)

       star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat', 'int16', shape_star)

    if 'star_fp' in locals() and 'port_fp' in locals():
       # check that port and starboard are same size
       # and trim if not
       if np.shape(star_fp)!=np.shape(port_fp):
          print("port and starboard scans are different sizes ... rectifying")
          if np.shape(port_fp[0])[1] > np.shape(star_fp[0])[1]:
             tmp = port_fp.copy()
             tmp2 = np.empty_like(star_fp)
             for k in range(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(star_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_port = io.set_mmap_data(sonpath, base, '_data_port2.dat', 'int16', tmp2)
             #shape_star = shape_port.copy()
             shape_star = tuple(np.asarray(shape_port).copy())

             ##we are only going to access the portion of memory required
             port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat', 'int16', shape_port)

             ind_port = list(ind_port)
             ind_port[-1] = np.shape(star_fp[0])[1]
             ind_port = tuple(ind_port)

          elif np.shape(port_fp[0])[1] < np.shape(star_fp[0])[1]:
             tmp = star_fp.copy()
             tmp2 = np.empty_like(port_fp)
             for k in range(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(port_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_port = io.set_mmap_data(sonpath, base, '_data_star2.dat', 'int16', tmp2)
             #shape_star = shape_port.copy()
             shape_star = tuple(np.asarray(shape_port).copy())

             #we are only going to access the portion of memory required
             star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat', 'int16', shape_star)

             ind_star = list(ind_star)
             ind_star[-1] = np.shape(port_fp[0])[1]
             ind_star = tuple(ind_star)

    if old_pyread == 1: #older pyread version
       # low-freq. sonar
       try:
          data_dwnlow = data.getlowscans().astype('int16')
       except:
          data_dwnlow = ''
          print("low-freq. scan not available")

    if data_dwnlow!='':

       Zt, ind_low = makechunks_scan(chunkmode, chunkval, metadat, data_dwnlow, 2)
       del data_dwnlow

       # create memory mapped file for Z
       shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', Zt)

       ##we are only going to access the portion of memory required
       dwnlow_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat', 'int16', shape_low)

    if old_pyread == 1: #older pyread version
       # hi-freq. sonar
       try:
          data_dwnhi = data.gethiscans().astype('int16')
       except:
          data_dwnhi = ''
          print("high-freq. scan not available")

    if data_dwnhi!='':

       Zt, ind_hi = makechunks_scan(chunkmode, chunkval, metadat, data_dwnhi, 3)
       del data_dwnhi

       # create memory mapped file for Z
       shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', Zt)

       dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16', shape_hi)

    if 'dwnhi_fp' in locals() and 'dwnlow_fp' in locals():
       # check that low and high are same size
       # and trim if not
       if (np.shape(dwnhi_fp)!=np.shape(dwnlow_fp)) and (chunkmode!=4):
          print("dwnhi and dwnlow are different sizes ... rectifying")
          if np.shape(dwnhi_fp[0])[1] > np.shape(dwnlow_fp[0])[1]:
             tmp = dwnhi_fp.copy()
             tmp2 = np.empty_like(dwnlow_fp)
             for k in range(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(dwnlow_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_low = io.set_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', tmp2)
             #shape_hi = shape_low.copy()
             shape_hi = tuple(np.asarray(shape_low).copy())

             ##we are only going to access the portion of memory required
             dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', shape_hi)

             ind_hi = list(ind_hi)
             ind_hi[-1] = np.shape(dwnlow_fp[0])[1]
             ind_hi = tuple(ind_hi)

          elif np.shape(dwnhi_fp[0])[1] < np.shape(dwnlow_fp[0])[1]:
             tmp = dwnlow_fp.copy()
             tmp2 = np.empty_like(dwnhi_fp)
             for k in range(len(tmp)):
                 tmp2[k] = tmp[k][:,:np.shape(dwnhi_fp[k])[1]]
             del tmp

             # create memory mapped file for Z
             shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', tmp2)
             #shape_hi = shape_low.copy()
             shape_hi = tuple(np.asarray(shape_low).copy())

             ##we are only going to access the portion of memory required
             dwnlow_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', shape_low)

             ind_low = list(ind_low)
             ind_low[-1] = np.shape(dwnhi_fp[0])[1]
             ind_low = tuple(ind_low)

    if old_pyread == 1: #older pyread version
       del data

    if ('shape_port' in locals()) and (chunkmode!=4):
       metadat['shape_port'] = shape_port
       nrec = metadat['shape_port'][0] * metadat['shape_port'][2]
    elif ('shape_port' in locals()) and (chunkmode==4):
       metadat['shape_port'] = shape_port
       nrec = metadat['shape_port'][1]
    else:
       metadat['shape_port'] = ''

    if ('shape_star' in locals()) and (chunkmode!=4):
       metadat['shape_star'] = shape_star
       nrec = metadat['shape_star'][0] * metadat['shape_star'][2]
    elif ('shape_star' in locals()) and (chunkmode==4):
       metadat['shape_star'] = shape_star
       nrec = metadat['shape_star'][1]
    else:
       metadat['shape_star'] = ''

    if ('shape_hi' in locals()) and (chunkmode!=4):
       metadat['shape_hi'] = shape_hi
       #nrec = metadat['shape_hi'][0] * metadat['shape_hi'][2] * 2
    elif ('shape_hi' in locals()) and (chunkmode==4):
       metadat['shape_hi'] = shape_hi
    else:
       metadat['shape_hi'] = ''

    if ('shape_low' in locals()) and (chunkmode!=4):
       metadat['shape_low'] = shape_low
       #nrec = metadat['shape_low'][0] * metadat['shape_low'][2] * 2
    elif ('shape_low' in locals()) and (chunkmode==4):
       metadat['shape_low'] = shape_low
    else:
       metadat['shape_low'] = ''

    #make kml boat trackline
    humutils.make_trackline(lon,lat, sonpath, base)

    if 'port_fp' in locals() and 'star_fp' in locals():

       #if not os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'meta.mat'))):
       if 2>1:
          if bedpick == 1: # auto

             x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp, c)

             if len(dist_m)<len(bed):
                dist_m = np.append(dist_m,dist_m[-1]*np.ones(len(bed)-len(dist_m)))

             if doplot==1:
                if chunkmode!=4:
                   for k in range(len(star_fp)):
                      plot_2bedpicks(port_fp[k], star_fp[k], bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], x[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
                else:
                   plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft, shape_port, sonpath, 0, chunkmode)

             # 'real' bed is estimated to be the minimum of the two
             bed = np.min(np.vstack((bed[:nrec],np.squeeze(x[:nrec]))),axis=0)
             bed = humutils.runningMeanFast(bed, 3)

          elif bedpick>1: # user prompt

             x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp, c)

             if len(dist_m)<len(bed):
                dist_m = np.append(dist_m,dist_m[-1]*np.ones(len(bed)-len(dist_m)))

             # 'real' bed is estimated to be the minimum of the two
             bed = np.min(np.vstack((bed[:nrec],np.squeeze(x[:nrec]))),axis=0)
             bed = humutils.runningMeanFast(bed, 3)

             # manually intervene
             fig = plt.figure()
             ax = plt.gca()
             if chunkmode !=4:
                im = ax.imshow(np.hstack(port_fp), cmap = 'gray', origin = 'upper')
             else:
                im = ax.imshow(port_fp, cmap = 'gray', origin = 'upper')
             plt.plot(bed,'r')
             plt.axis('normal'); plt.axis('tight')

             pts1 = plt.ginput(n=300, timeout=30) # it will wait for up to 300 clicks or 30 seconds
             x1=list(map(lambda x: x[0],pts1)) # map applies the function passed as
             y1=list(map(lambda x: x[1],pts1)) # first parameter to each element of pts
             plt.close()
             del fig

             if x1 != []: # if x1 is not empty
                tree = KDTree(list(zip(np.arange(1,len(bed)), bed)))
                try:
                   dist, inds = tree.query(list(zip(x1, y1)), k = 100, eps=5, n_jobs=-1)
                except:
                   dist, inds = tree.query(list(zip(x1, y1)), k = 100, eps=5)

                b = np.interp(inds,x1,y1)
                bed2 = bed.copy()
                bed2[inds] = b
                bed = bed2

             if doplot==1:
                if chunkmode!=4:
                   for k in range(len(star_fp)):
                      plot_2bedpicks(port_fp[k], star_fp[k], bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], x[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
                else:
                   plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft, shape_port, sonpath, 0, chunkmode)

          else: #manual

             beds=[]

             if chunkmode!=4:
                for k in range(len(port_fp)):
                   raw_input("Bed picking "+str(k+1)+" of "+str(len(port_fp))+", are you ready? 30 seconds. Press Enter to continue...")
                   bed={}
                   fig = plt.figure()
                   ax = plt.gca()
                   im = ax.imshow(port_fp[k], cmap = 'gray', origin = 'upper')
                   pts1 = plt.ginput(n=300, timeout=30) # it will wait for 200 clicks or 60 seconds
                   x1=map(lambda x: x[0],pts1) # map applies the function passed as
                   y1=map(lambda x: x[1],pts1) # first parameter to each element of pts
                   bed = np.interp(np.r_[:ind_port[-1]],x1,y1)
                   plt.close()
                   del fig
                   beds.append(bed)
                   extent = np.shape(port_fp[k])[0]
                bed = np.asarray(np.hstack(beds),'float')
             else:
                raw_input("Bed picking - are you ready? 30 seconds. Press Enter to continue...")
                bed={}
                fig = plt.figure()
                ax = plt.gca()
                im = ax.imshow(port_fp, cmap = 'gray', origin = 'upper')
                pts1 = plt.ginput(n=300, timeout=30) # it will wait for 200 clicks or 60 seconds
                x1=map(lambda x: x[0],pts1) # map applies the function passed as
                y1=map(lambda x: x[1],pts1) # first parameter to each element of pts
                bed = np.interp(np.r_[:ind_port[-1]],x1,y1)
                plt.close()
                del fig
                beds.append(bed)
                extent = np.shape(port_fp)[1]
                bed = np.asarray(np.hstack(beds),'float')

          # now revise the depth in metres
          dep_m = (1/ft)*bed

          if doplot==1:
             if chunkmode!=4:
                for k in range(len(star_fp)):
                   plot_bedpick(port_fp[k], star_fp[k], (1/ft)*bed[ind_port[-1]*k:ind_port[-1]*(k+1)], dist_m[ind_port[-1]*k:ind_port[-1]*(k+1)], ft, shape_port, sonpath, k, chunkmode)
             else:
                plot_bedpick(port_fp, star_fp, (1/ft)*bed, dist_m, ft, shape_port, sonpath, 0, chunkmode)

          metadat['bed'] = bed[:nrec]

    else:
       metadat['bed'] = dep_m[:nrec]*ft

    metadat['heading'] = metadat['heading'][:nrec]
    metadat['lon'] = lon[:nrec]
    metadat['lat'] = lat[:nrec]
    metadat['dist_m'] = dist_m[:nrec]
    metadat['dep_m'] = dep_m[:nrec]
    metadat['pix_m'] = 1/ft
    metadat['bed'] = metadat['bed'][:nrec]
    metadat['c'] = c
    metadat['t'] = t
    if model==2:
       metadat['f'] = f*2
    else:
       metadat['f'] = f

    metadat['spd'] = metadat['spd'][:nrec]
    metadat['time_s'] = metadat['time_s'][:nrec]
    metadat['e'] = metadat['e'][:nrec]
    metadat['n'] = metadat['n'][:nrec]
    metadat['es'] = metadat['es'][:nrec]
    metadat['ns'] = metadat['ns'][:nrec]
    try:
       metadat['caltime'] = metadat['caltime'][:nrec]
    except:
       metadat['caltime'] = metadat['caltime']

    savemat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')), metadat ,oned_as='row')

    f = open(os.path.normpath(os.path.join(sonpath,base+'rawdat.csv')), 'wt')
    writer = csv.writer(f)
    writer.writerow( ('longitude', 'latitude', 'easting', 'northing', 'depth (m)', 'distance (m)', 'instr. heading (deg)', 'heading (deg.)' ) )
    for i in range(0, nrec):
       writer.writerow(( float(lon[i]),float(lat[i]),float(es[i]),float(ns[i]),float(dep_m[i]),float(dist_m[i]), float(metadat['instr_heading'][i]), float(metadat['heading'][i]) ))
    f.close()

    del lat, lon, dep_m #, dist_m

    if doplot==1:

       plot_pos(sonpath, metadat, es, ns)

       if 'dwnlow_fp' in locals():

          plot_dwnlow(dwnlow_fp, chunkmode, sonpath)

       if 'dwnhi_fp' in locals():

          plot_dwnhi(dwnhi_fp, chunkmode, sonpath)

    if os.name=='posix': # true if linux/mac
       elapsed = (time.time() - start)
    else: # windows
       elapsed = (time.clock() - start)
    print("Processing took "+ str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Example No. 8
0
def texture2(humfile, sonpath, win, doplot,  numclasses): 
          
      '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015)
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, win, doplot, numclasses)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      win : int, *optional* [Default=10]
       size, in pixels, of the moving window
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of 'k means' that the texture lengthscale will be segmented into

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
      '''

      # prompt user to supply file if no input file given
      if not humfile:
         print('An input file is required!!!!!!')
         Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
         humfile = askopenfilename(filetypes=[("DAT files","*.DAT")]) 

      # prompt user to supply directory if no input sonpath is given
      if not sonpath:
         print('A *.SON directory is required!!!!!!')
         Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
         sonpath = askdirectory() 

      # print given arguments to screen and convert data type where necessary
      if humfile:
         print('Input file is %s' % (humfile))
         
      if sonpath:
         print('Sonar file path is %s' % (sonpath))
         
      if win:
         win = np.asarray(win,int)
         print('Window is %s square pixels' % (str(win)))
                
      if numclasses:
         numclasses = np.asarray(numclasses,int)
         print('Number of sediment classes: %s' % (str(numclasses)))
               
      if doplot:
         doplot = int(doplot)
         if doplot==0:
            print("Plots will not be made")
      
      
      print('[Default] Number of processors is %s' % (str(cpu_count())))
                        
      ########################################################
      ########################################################
      
      # start timer
      if os.name=='posix': # true if linux/mac or cygwin on windows
         start = time.time()
      else: # windows
         start = time.clock()

      # if son path name supplied has no separator at end, put one on
      if sonpath[-1]!=os.sep:
         sonpath = sonpath + os.sep

      base = humfile.split('.DAT') # get base of file name for output
      base = base[0].split(os.sep)[-1]

      # remove underscores, negatives and spaces from basename
      base = humutils.strip_base(base)   

      meta = loadmat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')))

      ft = 1/loadmat(sonpath+base+'meta.mat')['pix_m']
      #pix_m = np.squeeze(meta['pix_m'])
      #dep_m = np.squeeze(meta['dep_m'])
      dist_m = np.squeeze(meta['dist_m'])

      ### port
      print("processing port side ...")
      # load memory mapped scan ... port
      shape_port = np.squeeze(meta['shape_port'])
      if shape_port!='':

         if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat', 'float32', tuple(shape_port))         
         else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', tuple(shape_port))

         port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', tuple(shape_port))

      ### star
      print("processing starboard side ...")
      # load memory mapped scan ... star
      shape_star = np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_star'])
      if shape_star!='':
         if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat', 'float32', tuple(shape_star))
         else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', tuple(shape_star))

         star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', tuple(shape_star))
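      # combined output shape: port stacked on top of starboard; 3-D data keep their
      # leading chunk axis, while 2-D data are promoted to a single chunk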

      if len(shape_star)>2:
         shape = shape_port.copy()
         shape[1] = shape_port[1] + shape_star[1]
      else:
         shape = []
         shape.append(1)
         shape.append(shape_port[0])
         shape.append(shape_port[1])
         shape[1] = shape_port[0] + shape_star[0]

      # create memory mapped file for Sp
      #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
      #   fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))
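      # in-memory array holding the texture lengthscale map (one slab per chunk for
      # 3-D data); it is written to the memory-mapped '_data_class.dat' file below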
      fp = np.zeros(tuple(shape), dtype='float32')

      if len(shape_star)>2:

         for p in range(len(port_fp)):
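            # for each chunk: merge the flipped port scan with the starboard scan,
            # TV-denoise the merged image, and take its moving-window standard
            # deviation (std_convoluted's second output is used as the std map here)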
            
            merge = np.vstack((np.flipud(port_fp[p]), star_fp[p]))
            merge = denoise_tv_chambolle(merge.copy(), weight=2, multichannel=False).astype('float32')
            Snn = std_convoluted(merge, win)[1]
            del merge
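            # smooth the windowed-std map with a median filter; medfilt2d needs an
            # odd kernel size, so at least one of win and win+1 is always valid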
 
            try:
               Snn = medfilt2d(Snn, (win+1,win+1))
            except:
               Snn = medfilt2d(Snn, (win,win))

            Snn[np.isnan(np.vstack((np.flipud(port_fp[p]), star_fp[p])))] = np.nan
            Snn[np.isnan(np.vstack((np.flipud(port_fp2[p]), star_fp2[p])))] = np.nan

            R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', tuple(shape_star))

            # range raster for this chunk, flipped and stacked to mirror the port/star merge above
            R = np.vstack((np.flipud(R_fp[p]), R_fp[p]))
            
            R = R/np.max(R)
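            # fill NaN gaps in the normalised range raster by local-mean inpainting
            # so the cosine weighting below is defined everywhere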

            rn = replace_nans.RN(R.astype('float64'),1000,0.01,2,'localmean')
            R = rn.getdata()
            del rn   
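            # texture lengthscale estimate: squared windowed std, weighted by
            # cos(deg2rad(R)) and divided by the window size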

            Sp = (Snn**2) * np.cos(np.deg2rad(R)) /win ##**2

            fp[p] = Sp.astype('float32')
            del Sp

         #del fp # flush data to file
         shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32', np.squeeze(fp))
         del fp
         class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat', 'float32', tuple(shape))

      else: 
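            # single (un-chunked) scan: the same steps are applied to the whole mosaic at once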

            merge = np.vstack((np.flipud(port_fp), star_fp))
            merge = denoise_tv_chambolle(merge.copy(), weight=2, multichannel=False).astype('float32')
            Snn = std_convoluted(merge, win)[1]
            del merge

            try:
               Snn = medfilt2d(Snn, (win+1,win+1))
            except:
               Snn = medfilt2d(Snn, (win,win))

            Snn[np.isnan(np.vstack((np.flipud(port_fp), star_fp)))] = np.nan
            Snn[np.isnan(np.vstack((np.flipud(port_fp2), star_fp2)))] = np.nan

            R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', tuple(shape_star))

            R = np.vstack((np.flipud(R_fp),R_fp))
            R = R/np.max(R)

            rn = replace_nans.RN(R.astype('float64'),1000,0.01,2,'localmean')
            R = rn.getdata()
            del rn   

            Sp = (Snn**2) * np.cos(np.deg2rad(R)) / win ##**2

            shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32', np.squeeze(Sp))

            #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
            #   np.save(ff, np.squeeze(Sp).astype('float32'))

            #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
            #   class_fp = np.load(ff)

            #del Sp
            class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat', 'float32', tuple(shape))

      dist_m = np.squeeze(meta['dist_m'])

      ########################################################
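      # plot the texture lengthscale map and its class contours (per chunk) if requested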
      if doplot==1:

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_class(dist_m, shape_port, port_fp[p], star_fp[p], class_fp[p], ft, humfile, sonpath, base, p)
         else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft, humfile, sonpath, base, 0)

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_contours(dist_m, shape_port, class_fp[p], ft, humfile, sonpath, base, numclasses, p)
         else:
            plot_contours(dist_m, shape_port, class_fp, ft, humfile, sonpath, base, numclasses, 0)
        

      #######################################################
      # k-means 
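      # segment the texture lengthscale map into 'numclasses' classes (get_kclass is
      # assumed to wrap a k-means clustering of the lengthscale values) and write the
      # result to the memory-mapped '_data_kclass.dat' file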
      
      if len(shape_star)>2:
         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'wb+') as ff: # binary mode so np.memmap can size the new file
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

         for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

         del fp

         kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat', 'float32', tuple(shape))
            
      else:
         wc = get_kclass(class_fp.copy(), numclasses)

         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'wb+') as ff: # binary mode so np.save can write raw bytes
            np.save(ff, np.squeeze(wc).astype('float32'))

         del wc
         
         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'rb') as ff: # binary mode required by np.load
            kclass_fp = np.load(ff)
            
      ########################################################
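      # plot the k-means classified map if requested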
      if doplot==1:

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p], kclass_fp[p], ft, humfile, sonpath, base, p)
         else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft, humfile, sonpath, base, 0)         

      if os.name=='posix': # true if linux/mac
         elapsed = (time.time() - start)
      else: # windows
         elapsed = (time.clock() - start)
      print("Processing took " + str(elapsed) + " seconds to analyse")

      print("Done!")
      print("===================================================")