Example #1
def do_ppdrc(fp, filtsize):
    dat = fp.astype('float64')
    dat[np.isnan(dat)] = 0
    dat1 = ppdrc.ppdrc(dat, filtsize)
    dat1 = humutils.rescale(dat1.getdata(), np.min(dat), np.max(dat))
    dat1[np.isnan(fp)] = np.nan
    return dat1
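
A minimal usage sketch for the helper above (hedged: it assumes numpy and PyHum's ppdrc and utils modules are already bound to the names np, ppdrc and humutils used in the snippet; the input array is a stand-in, not real sonar data):

import numpy as np
# assumed module bindings, matching the names used in do_ppdrc:
# from PyHum import ppdrc
# from PyHum import utils as humutils

scan = np.random.rand(500, 2000).astype('float32')  # stand-in sidescan chunk
scan[:, :10] = np.nan                                # simulate a no-data strip
enhanced = do_ppdrc(scan, filtsize=scan.shape[1])    # same shape; NaNs restored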
Example #2
def do_ppdrc(fp, filtsize):
   dat = fp.astype('float64')
   dat[np.isnan(dat)] = 0
   dat1 = ppdrc.ppdrc(dat, filtsize)
   dat1 = humutils.rescale(dat1.getdata(),np.min(dat),np.max(dat))
   dat1[np.isnan(fp)] = np.nan
   return dat1
Example #3
                                       'float32', tuple(shape_star))

    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
    else:
        shape = []
        shape.append(1)
        shape.append(shape_port[0])
        shape.append(shape_port[1])
        shape[1] = shape_port[0] + shape_star[0]

    #work on the entire scan
    im = np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp)))
    im[np.isnan(im)] = 0
    im = humutils.rescale(im, 0, 1)
    del port_fp, star_fp
    print('Calculating SLIC super pixels...')
    #get SLIC superpixels
    segments_slic = slic(im, n_segments=int(im.shape[0] / 10), compactness=.1)
    extent = int(np.shape(im)[0] / 2)
    del im

    print("Found %s unique segments" % str(len(np.unique(segments_slic))))
    # get number pixels in scan line

    S = np.vstack(
        (np.flipud(segments_slic[:extent, :]), segments_slic[extent:, :]))
    del segments_slic

    e = np.squeeze(meta['e'])
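
The fragment above hinges on scikit-image's SLIC oversegmentation. A self-contained sketch of that call on a grayscale array (note: recent scikit-image releases take channel_axis=None for single-channel input; older releases accept the 2-D array directly):

import numpy as np
from skimage.segmentation import slic

img = np.random.rand(400, 600)                        # stand-in scan, rescaled to [0, 1]
segments = slic(img, n_segments=img.shape[0] // 10,   # roughly one segment per 10 rows
                compactness=0.1, channel_axis=None)   # grayscale input
print('Found %s unique segments' % len(np.unique(segments)))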
Example #4
    ymin = gt[3] + (yres * ds.RasterYSize) + yres * 0.5
    ymax = gt[3] - yres * 0.5
    del ds
    # create a grid of xy coordinates in the original projection
    xx, yy = np.mgrid[xmin:xmax+xres:xres, ymax+yres:ymin:yres]
    return data, xx, yy, gt, proj

    
if __name__ == '__main__':
    root  = r"C:\workspace\gound_truth\output\slic_seg_plots"
    sed5class_raster = r"C:\workspace\Reach_4a\Multibeam\mb_sed_class\may2014_mb6086r_sedclass\mb_sed5class_2014_05_raster.tif"

    for num in [900,1000,1100,1200,1300,1400,1500,1600,1700,1800]:
        data, xx, yy, gt, proj = read_raster(sed5class_raster)
        data[np.isnan(data)] = 0
        data = utils.rescale(data,0,1)
        segments_slic = slic(data,n_segments=num,compactness=0.1)
        test = segments_slic.copy()
        test = test.astype('float')
        test[np.isnan(read_raster(sed5class_raster)[0])] = np.nan
        
        fig,ax = plt.subplots()
        ax.imshow(mark_boundaries(data, segments_slic,color=[1,0,0]),cmap='coolwarm') 
        title = ' n_segments = %s' %(str(num),)
        ax.set_title(title)
        plt.tight_layout()
        oName = root + os.sep + 'mkboundaries_sed5class_' + str(num) + '_segs.png'
        plt.savefig(oName,dpi=600)
        plt.close()
             
        fig,ax = plt.subplots()
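
The read_raster fragment above builds cell-centre coordinate grids from a GDAL geotransform. A generic sketch under the standard north-up convention, gt = (x_origin, pixel_width, 0, y_origin, 0, -pixel_height); cell_centre_grid is a hypothetical helper that mirrors the idea, not the exact (truncated) arithmetic above:

import numpy as np

def cell_centre_grid(gt, nx, ny):
    dx, dy = gt[1], -gt[5]                      # pixel sizes (dy made positive)
    x = gt[0] + dx * (np.arange(nx) + 0.5)      # column centres, west to east
    y = gt[3] - dy * (np.arange(ny) + 0.5)      # row centres, north to south
    return np.meshgrid(x, y)                    # xx, yy, each of shape (ny, nx)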
Example #5
def texture_slic(humfile, sonpath, doplot=1, numclasses=4, maxscale=20, notes=4):
          
      '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015)
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, doplot, numclasses, maxscale, notes)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of 'k means' that the texture lengthscale will be segmented into
      maxscale : int, *optional* [Default=20]
       Max scale as inverse fraction of data length for wavelet analysis
      notes : int, *optional* [Default=4]
       notes per octave for wavelet analysis

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
      '''

      # prompt user to supply file if no input file given
      if not humfile:
         print('An input file is required!!!!!!')
         Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
         humfile = askopenfilename(filetypes=[("DAT files","*.DAT")]) 

      # prompt user to supply directory if no input sonpath is given
      if not sonpath:
         print('A *.SON directory is required!!!!!!')
         Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
         sonpath = askdirectory() 

      # print given arguments to screen and convert data type where necessary
      if humfile:
         print('Input file is %s' % (humfile))
         
      if sonpath:
         print('Sonar file path is %s' % (sonpath))

      if numclasses:
         numclasses = np.asarray(numclasses,int)
         print('Number of sediment classes: %s' % (str(numclasses)))
         
      if maxscale:
         maxscale = np.asarray(maxscale,int)
         print('Max scale as inverse fraction of data length: %s' % (str(maxscale)))
         
      if notes:
         notes = np.asarray(notes,int)
         print('Notes per octave: %s' % (str(notes)))
         
      if doplot:
         doplot = int(doplot)
         if doplot==0:
            print("Plots will not be made")   
      
      
      print('[Default] Number of processors is %s' % (str(cpu_count())))
                        
      ########################################################
      ########################################################
      
      # start timer
      if os.name=='posix': # true if linux/mac or cygwin on windows
         start = time.time()
      else: # windows
         start = time.clock()

      # if son path name supplied has no separator at end, put one on
      if sonpath[-1]!=os.sep:
         sonpath = sonpath + os.sep

      base = humfile.split('.DAT') # get base of file name for output
      base = base[0].split(os.sep)[-1]

      # remove underscores, negatives and spaces from basename
      base = humutils.strip_base(base)   

      meta = loadmat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')))

      ft = 1/loadmat(sonpath+base+'meta.mat')['pix_m']
      #pix_m = np.squeeze(meta['pix_m'])
      #dep_m = np.squeeze(meta['dep_m'])
      dist_m = np.squeeze(meta['dist_m'])

      ### port
      print("processing port side ...")
      # load memory mapped scan ... port
      shape_port = np.squeeze(meta['shape_port'])
      if shape_port!='':

         if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat', 'float32', tuple(shape_port))         
         else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', tuple(shape_port))
          
         #port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', tuple(shape_port))

      ### star
      print("processing starboard side ...")
      # load memory mapped scan ... starboard
      shape_star = np.squeeze(loadmat(sonpath+base+'meta.mat')['shape_star'])
      if shape_star!='':
         if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat', 'float32', tuple(shape_star))
         else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', tuple(shape_star))

         #star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', tuple(shape_star))

      if len(shape_star)>2:
         shape = shape_port.copy()
         shape[1] = shape_port[1] + shape_star[1]
      else:
         shape = []
         shape.append(1)
         shape.append(shape_port[0])
         shape.append(shape_port[1])
         shape[1] = shape_port[0] + shape_star[0]

      #work on the entire scan
      #im = humutils.rescale(np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp))),0,1)
      im = np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp)))
      im[np.isnan(im)] = 0
      im = humutils.rescale(im,0,1)

      #get SLIC superpixels
      segments_slic = slic(im, n_segments=int(im.shape[0]/10), compactness=.1)

      #pre-allocate texture lengthscale array
      tl = np.zeros(im.shape, dtype = "float64")

      #cycle through each segment and compute tl
      for k in np.unique(segments_slic):
         mask = np.zeros(im.shape[:2], dtype = "uint8")
         mask[segments_slic == k] = 255
         cmask, cim = crop_toseg(mask, im)
         tl[segments_slic == k] = parallel_me(cim, maxscale, notes, np.shape(cim)[0])

      R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', tuple(shape_star))
      R = np.vstack((np.flipud(np.hstack(R_fp)), np.hstack(R_fp)))
      R = R/np.max(R)

      #correct for range and scale
      tl = tl * np.cos(R) * (1/ft)
      tl[im==0] = np.nan 
      tl[np.isnan(im)] = np.nan 

      # create memory mapped file for Sp
      with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
         fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

      counter = 0
      if len(shape_star)>2:
         for p in range(len(port_fp)):
            if p==0:
               n,m = np.shape(np.vstack((np.flipud(port_fp[p]), star_fp[p])))
            else:
               n,m = np.shape(np.vstack((np.flipud(port_fp[p]), star_fp[p])))
            Sp = tl[:n, counter:counter+m]
            counter = counter+m
            fp[p] = Sp.astype('float32')
            del Sp
         del fp # flush data to file

         class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat', 'float32', tuple(shape))

      else:

            with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
               np.save(ff, np.squeeze(Sp).astype('float32'))

            with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
               class_fp = np.load(ff)

      dist_m = np.squeeze(loadmat(sonpath+base+'meta.mat')['dist_m'])

      ########################################################
      if doplot==1:

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_class(dist_m, shape_port, port_fp[p], star_fp[p], class_fp[p], ft, humfile, sonpath, base, p)
         else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft, humfile, sonpath, base, 0)

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_contours(dist_m, shape_port, class_fp[p], ft, humfile, sonpath, base, numclasses, p)
         else:
            plot_contours(dist_m, shape_port, class_fp, ft, humfile, sonpath, base, numclasses, 0)
        

      #######################################################
      # k-means 
      
      if len(shape_star)>2:
         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

         for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

         del fp

         kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat', 'float32', tuple(shape))
            
      else:
         wc = get_kclass(class_fp.copy(), numclasses)

         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'w+') as ff:
            np.save(ff, np.squeeze(wc).astype('float32'))

         del wc
         
         with open(os.path.normpath(os.path.join(sonpath,base+'_data_kclass.dat')), 'r') as ff:
            kclass_fp = np.load(ff)
            
      ########################################################
      if doplot==1:

         if len(shape_star)>2:
            for p in range(len(star_fp)):
               plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p], kclass_fp[p], ft, humfile, sonpath, base, p)
         else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft, humfile, sonpath, base, 0)         

      if os.name=='posix': # true if linux/mac
         elapsed = (time.time() - start)
      else: # windows
         elapsed = (time.clock() - start)
      print("Processing took " + str(elapsed) + " seconds to analyse")

      print("Done!")
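
For orientation, a hedged example of invoking the function above (the paths are placeholders; the keyword values simply restate the defaults documented in the docstring):

humfile = '/path/to/recording.DAT'   # placeholder Humminbird .DAT file
sonpath = '/path/to/son_files/'      # placeholder directory containing the *.SON files
texture_slic(humfile, sonpath, doplot=1, numclasses=4, maxscale=20, notes=4)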
Example #6
def texture_slic(humfile,
                 sonpath,
                 doplot=1,
                 numclasses=4,
                 maxscale=20,
                 notes=4):
    '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015)
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, doplot, numclasses, maxscale, notes)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of 'k means' that the texture lengthscale will be segmented into
      maxscale : int, *optional* [Default=20]
       Max scale as inverse fraction of data length for wavelet analysis
      notes : int, *optional* [Default=4]
       notes per octave for wavelet analysis

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
      '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if numclasses:
        numclasses = np.asarray(numclasses, int)
        print('Number of sediment classes: %s' % (str(numclasses)))

    if maxscale:
        maxscale = np.asarray(maxscale, int)
        print('Max scale as inverse fraction of data length: %s' %
              (str(maxscale)))

    if notes:
        notes = np.asarray(notes, int)
        print('Notes per octave: %s' % (str(notes)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    print('[Default] Number of processors is %s' % (str(cpu_count())))

    ########################################################
    ########################################################

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    ft = 1 / loadmat(sonpath + base + 'meta.mat')['pix_m']
    #pix_m = np.squeeze(meta['pix_m'])
    #dep_m = np.squeeze(meta['dep_m'])
    dist_m = np.squeeze(meta['dist_m'])

    ### port
    print("processing port side ...")
    # load memory mapped scan ... port
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

        #port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', tuple(shape_port))

    ### star
    print("processing starboard side ...")
    # load memory mapped scan ... starboard
    shape_star = np.squeeze(loadmat(sonpath + base + 'meta.mat')['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

        #star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', tuple(shape_star))

    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
    else:
        shape = []
        shape.append(1)
        shape.append(shape_port[0])
        shape.append(shape_port[1])
        shape[1] = shape_port[0] + shape_star[0]

    #work on the entire scan
    #im = humutils.rescale(np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp))),0,1)
    im = np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp)))
    im[np.isnan(im)] = 0
    im = humutils.rescale(im, 0, 1)

    #get SLIC superpixels
    segments_slic = slic(im, n_segments=int(im.shape[0] / 10), compactness=.1)

    #pre-allocate texture lengthscale array
    tl = np.zeros(im.shape, dtype="float64")

    #cycle through each segment and compute tl
    for k in np.unique(segments_slic):
        mask = np.zeros(im.shape[:2], dtype="uint8")
        mask[segments_slic == k] = 255
        cmask, cim = crop_toseg(mask, im)
        tl[segments_slic == k] = parallel_me(cim, maxscale, notes,
                                             np.shape(cim)[0])

    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            tuple(shape_star))
    R = np.vstack((np.flipud(np.hstack(R_fp)), np.hstack(R_fp)))
    R = R / np.max(R)

    #correct for range and scale
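    # ft = 1/pix_m (pixels per metre), so the factor 1/ft converts the
    # lengthscale from pixels to metres; cos(R) tapers the estimate with
    # normalised range (R was rescaled to [0, 1] above)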
    tl = tl * np.cos(R) * (1 / ft)
    tl[im == 0] = np.nan
    tl[np.isnan(im)] = np.nan

    # create memory mapped file for Sp
    with open(
            os.path.normpath(os.path.join(sonpath, base + '_data_class.dat')),
            'w+') as ff:
        fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

    counter = 0
    if len(shape_star) > 2:
        for p in range(len(port_fp)):
            if p == 0:
                n, m = np.shape(np.vstack((np.flipud(port_fp[p]), star_fp[p])))
            else:
                n, m = np.shape(np.vstack((np.flipud(port_fp[p]), star_fp[p])))
            Sp = tl[:n, counter:counter + m]
            counter = counter + m
            fp[p] = Sp.astype('float32')
            del Sp
        del fp  # flush data to file

        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    else:

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_class.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(Sp).astype('float32'))

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_class.dat')),
                'r') as ff:
            class_fp = np.load(ff)

    dist_m = np.squeeze(loadmat(sonpath + base + 'meta.mat')['dist_m'])

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_class(dist_m, shape_port, port_fp[p], star_fp[p],
                           class_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                       humfile, sonpath, base, 0)

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_contours(dist_m, shape_port, class_fp[p], ft, humfile,
                              sonpath, base, numclasses, p)
        else:
            plot_contours(dist_m, shape_port, class_fp, ft, humfile, sonpath,
                          base, numclasses, 0)

    #######################################################
    # k-means

    if len(shape_star) > 2:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

        for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

        del fp

        kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat',
                                     'float32', tuple(shape))

    else:
        wc = get_kclass(class_fp.copy(), numclasses)

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(wc).astype('float32'))

        del wc

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'r') as ff:
            kclass_fp = np.load(ff)

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p],
                            kclass_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft,
                        humfile, sonpath, base, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + " seconds to analyse")

    print("Done!")
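
The get_kclass helper used above is not shown in these snippets. A hypothetical stand-in illustrating the idea, binning the texture-lengthscale map into numclasses groups with k-means (an illustration only, not PyHum's implementation):

import numpy as np
from sklearn.cluster import KMeans

def kclass_sketch(tl_map, numclasses):
    finite = np.isfinite(tl_map)
    vals = tl_map[finite].reshape(-1, 1)
    km = KMeans(n_clusters=numclasses, n_init=10, random_state=0).fit(vals)
    # relabel clusters so class 0 has the smallest (finest) mean lengthscale
    order = np.argsort(km.cluster_centers_.ravel())
    relabel = np.empty_like(order)
    relabel[order] = np.arange(numclasses)
    out = np.full(tl_map.shape, np.nan)
    out[finite] = relabel[km.labels_]
    return out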