Code example #1
File: _pyhum_e1e2.py  Project: jacurtis-usgs/PyHum
def e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq,
         integ, numclusters, doplot):
    '''
    Analysis of first (e1, 'roughness') and second (e2, 'hardness') echo returns from the high-frequency downward looking echosounder
    Generates generalised acoustic parameters
    for the purposes of point classification of submerged substrates/vegetation
    Accounts for the absorption of sound in water
    Does a basic k-means cluster of e1 and e2 coefficients into specified number of 'acoustic classes'
    based on code by Barb Fagetter ([email protected])

    Syntax
    ----------
    [] = PyHum.e1e2(humfile, sonpath, cs2cs_args, ph, temp, salinity, beam, transfreq, integ, numclusters, doplot)

    Parameters
    ----------
    humfile : str
       path to the .DAT file

    sonpath : str
       path where the *.SON files are

    cs2cs_args : str, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries

    ph : float, *optional* [Default=7.0]
       water acidity in pH

    temp : float, *optional* [Default=10.0]
       water temperature in degrees Celsius

    salinity : float, *optional* [Default=0.0]
       salinity of water in parts per thousand

    beam : float, *optional* [Default=20.0]
       beam width in degrees

    transfreq : float, *optional* [Default=200.0]
       transducer frequency in kHz

    integ : int, *optional* [Default=5]
       number of pings over which to integrate

    numclusters : int, *optional* [Default=3]
       number of acoustic classes to classify all the data into

    doplot : int, *optional* [Default=1]
       1 = make plots, otherwise do not

    Returns
    -------
    sonpath+base+'rough_and_hard'+str(p)+'.csv'  : csv file
        contains the following fields: 'longitude', 'latitude', 'easting', 'northing', 'depth', 
        'roughness', 'hardness', 'average roughness', 'average hardness','k-mean label'
        of the pth chunk
        'average' implies average over 'integ' successive pings

    The following are returned if doplot==1:

    sonpath+'e1e2_scan'+str(p).png : png image file
       png image file showing the downward echosounder echogram overlain with the locations of the start and 
       end of the first and second echo region envelope 

    sonpath+'e1e2_kmeans'+str(p).png: png image file
        png image file showing 1) (left) volume scattering coefficient 1 versus volume scattering coefficient 2, colour-coded
        by k-means acoustic class, and
        2) (right) e1 versus e2, colour-coded
        by k-means acoustic class

    sonpath+'rgh_hard_kmeans'+str(p).png : png image file
        png image file showing scatter plot of easting versus northing colour-coded by k-means acoustic class 

    sonpath+'map_rgh'+str(p).png : png image file
        png image file showing scatter plot of 'roughness' (e1) overlying an aerial image pulled from an ESRI image server 

    sonpath+'map_hard'+str(p).png : png image file
        png image file showing scatter plot of 'hardness' (e2) overlying an aerial image pulled from an ESRI image server 

    sonpath,'Rough'+str(p).png : png image file 
        png image overlay associated with the kml file, sonpath,'Rough'+str(p).kml

    sonpath,'Rough'+str(p).kml : kml file
        kml overlay for showing roughness scatter plot (sonpath,'Rough'+str(p).png)

    sonpath,'Hard'+str(p).png : png image file
        png image overlay associated with the kml file, sonpath,'Hard'+str(p).kml
    
    sonpath,'Hard'+str(p).kml : kml file
        kml overlay for showing hardness scatter plot (sonpath,'Hard'+str(p).png)
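
    Example
    -------
    A minimal usage sketch: the .DAT path and *.SON directory are hypothetical, and the
    remaining arguments are the documented defaults listed above.

    import PyHum
    PyHum.e1e2('test.DAT', 'sonfiles/', 'epsg:26949', 7.0, 10.0, 0.0, 20.0, 200.0, 5, 3, 1)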

    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))

    if beam:
        beam = np.asarray(beam, float)
        print('Beam is %s deg' % (str(beam)))

    if salinity:
        salinity = np.asarray(salinity, float)
        print('Salinity is %s ppt' % (str(salinity)))

    if ph:
        ph = np.asarray(ph, float)
        print('pH is %s' % (str(ph)))

    if temp:
        temp = np.asarray(temp, float)
        print('Temperature is %s' % (str(temp)))

    if transfreq:
        transfreq = np.asarray(transfreq, float)
        print('Downward sonar freq. is %s kHz' % (str(transfreq)))

    if integ:
        integ = np.asarray(integ, int)
        print('number of records for integration is %s' % (str(integ)))

    if numclusters:
        numclusters = np.asarray(numclusters, int)
        print('number of returned acoustic clusters is %s' %
              (str(numclusters)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    beamwidth = beam * (np.sqrt(0.5))
    equivbeam = (5.78 / (np.power(1.6, 2))) * (np.power((np.sin(
        (beamwidth * np.pi) / (2 * 180))), 2))

    meta = loadmat(sonpath + base + 'meta.mat')

    c = np.squeeze(meta['c'])
    t = np.squeeze(meta['t'])
    f = np.squeeze(meta['f'])
    maxW = np.squeeze(meta['maxW'])

    lat = np.squeeze(meta['lat'])
    lon = np.squeeze(meta['lon'])
    es = np.squeeze(meta['e'])
    ns = np.squeeze(meta['n'])
    dep = np.squeeze(meta['dep_m'])
    #del meta

    # load memory mapped scans
    shape_hi = np.squeeze(meta['shape_hi'])
    if shape_hi != '':
        try:
            #dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_hi))
            with open(
                    os.path.normpath(
                        os.path.join(sonpath, base + '_data_dwnhi.dat')),
                    'r') as ff:
                dwnhi_fp = np.memmap(ff,
                                     dtype='int16',
                                     mode='r',
                                     shape=tuple(shape_hi))

        except:
            shape_lo = np.squeeze(meta['shape_low'])
            #dwnhi_fp = np.memmap(sonpath+base+'_data_dwnhi.dat', dtype='int16', mode='r', shape=tuple(shape_lo))
            with open(
                    os.path.normpath(
                        os.path.join(sonpath, base + '_data_dwnhi.dat')),
                    'r') as ff:
                dwnhi_fp = np.memmap(ff,
                                     dtype='int16',
                                     mode='r',
                                     shape=tuple(shape_lo))

    if 'dwnhi_fp' in locals():

        theta3dB = np.arcsin(c / (t *
                                  (f * 1000)))  # *(180/pi) # to see in degs
        ft = (np.pi / 2) * (1 / theta3dB)
        bed = ft * dep

        if len(shape_hi) > 2:
            i = np.linspace(1, shape_hi[0] * shape_hi[2], len(bed))
            #np.shape(beam_data)[1],len(bed))
            #bedi = np.interp(np.linspace(1,np.shape(beam_data)[1],np.shape(beam_data)[1]), i, bed)
            bedi = np.interp(
                np.linspace(1, shape_hi[0] * shape_hi[2],
                            shape_hi[0] * shape_hi[2]), i, bed)
            ei = np.interp(
                np.linspace(1, shape_hi[0] * shape_hi[2],
                            shape_hi[0] * shape_hi[2]), i, es)
            ni = np.interp(
                np.linspace(1, shape_hi[0] * shape_hi[2],
                            shape_hi[0] * shape_hi[2]), i, ns)
            lati = np.interp(
                np.linspace(1, shape_hi[0] * shape_hi[2],
                            shape_hi[0] * shape_hi[2]), i, lat)
            loni = np.interp(
                np.linspace(1, shape_hi[0] * shape_hi[2],
                            shape_hi[0] * shape_hi[2]), i, lon)
            del i
        else:
            i = np.linspace(1, shape_hi[1], len(bed))
            #np.shape(beam_data)[1],len(bed))
            #bedi = np.interp(np.linspace(1,np.shape(beam_data)[1],np.shape(beam_data)[1]), i, bed)
            bedi = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, bed)
            ei = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, es)
            ni = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, ns)
            lati = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, lat)
            loni = np.interp(np.linspace(1, shape_hi[1], shape_hi[1]), i, lon)
            del i

        bedi = np.asarray(bedi, 'int')

        depi = ((1 / ft) * bedi)

        # near-field region
        nf = int(ft * (1000 * (0.105**2) * f / (4 * 1500)))

        #absorption = calcAb(c, ph, salinity, temp, np.asarray(depi), transfreq)
        absorption = water_atten(np.asarray(depi), transfreq, c, ph, temp,
                                 salinity)

        if len(shape_hi) > 2:
            for p in range(len(dwnhi_fp)):
                #make an index of every record (ping)
                ind = range(0, np.shape(dwnhi_fp[p])[1])

                Zdepi = depi[shape_hi[2] * p:shape_hi[2] * (p + 1)]
                Zabsorp = absorption[shape_hi[2] * p:shape_hi[2] * (p + 1)]
                Zlat = lati[shape_hi[2] * p:shape_hi[2] * (p + 1)]
                Zlon = loni[shape_hi[2] * p:shape_hi[2] * (p + 1)]
                Zes = ei[shape_hi[2] * p:shape_hi[2] * (p + 1)]
                Zns = ni[shape_hi[2] * p:shape_hi[2] * (p + 1)]

                try:  #parallel processing with all available cores
                    w = Parallel(n_jobs=-1, verbose=0)(delayed(
                        get_rgh_hrd)(dwnhi_fp[p][:, i], Zdepi[i], Zabsorp[i],
                                     c, nf, transfreq, equivbeam, maxW, pi, ft)
                                                       for i in ind)
                except:  #fall back to serial
                    w = Parallel(n_jobs=1, verbose=0)(delayed(
                        get_rgh_hrd)(dwnhi_fp[p][:, i], Zdepi[i], Zabsorp[i],
                                     c, nf, transfreq, equivbeam, maxW, pi, ft)
                                                      for i in ind)

                rough, hard, sv_e1, sv_e2, e1a, e1b, e2a, e2b = zip(*w)

                rough = np.array(rough, 'float')
                rough[rough == 0.0] = np.nan

                hard = np.array(hard, 'float')
                hard[hard == 0.0] = np.nan

                sv_e1 = np.array(sv_e1, 'float')
                sv_e1[sv_e1 == 0.0] = np.nan

                sv_e2 = np.array(sv_e2, 'float')
                sv_e2[sv_e2 == 0.0] = np.nan

                try:
                    nans, y = humutils.nan_helper(rough)
                    rough[nans] = np.interp(y(nans), y(~nans), rough[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(hard)
                    hard[nans] = np.interp(y(nans), y(~nans), hard[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(sv_e1)
                    sv_e1[nans] = np.interp(y(nans), y(~nans), sv_e1[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(sv_e2)
                    sv_e2[nans] = np.interp(y(nans), y(~nans), sv_e2[~nans])
                except:
                    pass

                data = np.column_stack([sv_e1, sv_e2])
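                # mini-batch k-means: cluster the (sv_e1, sv_e2) pairs into 'numclusters' acoustic classes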
                k_means = MiniBatchKMeans(numclusters)
                # fit the model
                k_means.fit(data)
                values = k_means.cluster_centers_.squeeze()
                labels = k_means.labels_

                hardav = humutils.runningMeanFast(hard, integ)
                roughav = humutils.runningMeanFast(rough, integ)

                #f = open(sonpath+base+'rough_and_hard'+str(p)+'.csv', 'wt')
                f = open(
                    os.path.normpath(
                        os.path.join(sonpath, base + 'rough_and_hard' +
                                     str(p) + '.csv')), 'wt')
                writer = csv.writer(f)
                writer.writerow(
                    ('longitude', 'latitude', 'easting', 'northing', 'depth',
                     'roughness', 'hardness', 'average roughness',
                     'average hardness', 'k-mean label'))
                for i in range(0, len(rough)):
                    writer.writerow(
                        (float(Zlon[i]), float(Zlat[i]), float(Zes[i]),
                         float(Zns[i]), float(Zdepi[i]), float(rough[i]),
                         float(hard[i]), float(roughav[i]), float(hardav[i]),
                         labels[i].astype(int)))
                f.close()

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        plt.imshow(dwnhi_fp[p], cmap='gray')
                        plt.plot(e1a, 'r')
                        plt.plot(e1b, 'y')
                        plt.plot(e2a, 'c')
                        plt.plot(e2b, 'm')
                        plt.axis('tight')
                        #plt.show()
                        custom_save(sonpath, 'e1e2_scan' + str(p))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        fig.subplots_adjust(wspace=0.4, hspace=0.4)
                        plt.subplot(221)
                        plt.plot(sv_e1[labels == 0], sv_e2[labels == 0], 'ko')
                        plt.plot(sv_e1[labels == 1], sv_e2[labels == 1], 'ro')
                        plt.plot(sv_e1[labels == 2], sv_e2[labels == 2], 'bo')
                        plt.xlabel('SV1')
                        plt.ylabel('SV2')
                        plt.xlim(0, 1)
                        plt.ylim(0, 1)

                        plt.subplot(222)
                        plt.plot(rough[labels == 0], hard[labels == 0], 'ko')
                        plt.plot(rough[labels == 1], hard[labels == 1], 'ro')
                        plt.plot(rough[labels == 2], hard[labels == 2], 'bo')
                        plt.xlabel('E1')
                        plt.ylabel('E2')
                        plt.xlim(1, 8)
                        plt.ylim(1, 8)
                        #plt.show()
                        custom_save(sonpath, 'e1e2_kmeans' + str(p))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        s = plt.scatter(Zes[labels == 0],
                                        Zns[labels == 0],
                                        marker='o',
                                        c='k',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        s = plt.scatter(Zes[labels == 1],
                                        Zns[labels == 1],
                                        marker='o',
                                        c='r',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        s = plt.scatter(Zes[labels == 2],
                                        Zns[labels == 2],
                                        marker='o',
                                        c='b',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        custom_save(sonpath, 'rgh_hard_kmeans' + str(p))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        #fig.subplots_adjust(wspace = 0.4, hspace=0.4)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #epsg=26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.0001,
                            llcrnrlat=np.min(Zlat) - 0.0001,
                            urcrnrlon=np.max(Zlon) + 0.0001,
                            urcrnrlat=np.max(Zlat) + 0.0001)

                        # draw point cloud
                        x, y = map.projtran(Zlon, Zlat)

                        cs = map.scatter(x.flatten(),
                                         y.flatten(),
                                         1,
                                         rough.flatten(),
                                         linewidth=0,
                                         vmin=0,
                                         vmax=8)

                        try:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='ESRI_Imagery_World_2D',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)
                        except:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='World_Imagery',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)

                        cbar = map.colorbar(cs, location='bottom', pad="5%")
                        cbar.set_label('E1')
                        cbar.set_ticks([0, 2, 4, 6, 8])

                        custom_save(sonpath, 'map_rgh' + str(p))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        fig = plt.figure()
                        #fig.subplots_adjust(wspace = 0.4, hspace=0.4)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.0001,
                            llcrnrlat=np.min(Zlat) - 0.0001,
                            urcrnrlon=np.max(Zlon) + 0.0001,
                            urcrnrlat=np.max(Zlat) + 0.0001)

                        # draw point cloud
                        x, y = map.projtran(Zlon, Zlat)

                        cs = map.scatter(x.flatten(),
                                         y.flatten(),
                                         1,
                                         hard.flatten(),
                                         linewidth=0,
                                         vmin=0,
                                         vmax=8)

                        try:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='ESRI_Imagery_World_2D',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)
                        except:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='World_Imagery',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)

                        cbar = map.colorbar(cs, location='bottom', pad="5%")
                        cbar.set_label('E2')
                        cbar.set_ticks([0, 2, 4, 6, 8])

                        custom_save(sonpath, 'map_hard' + str(p))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.001,
                            llcrnrlat=np.min(Zlat) - 0.001,
                            urcrnrlon=np.max(Zlon) + 0.001,
                            urcrnrlat=np.max(Zlat) + 0.001)

                        ax = plt.Axes(
                            fig,
                            [0., 0., 1., 1.],
                        )
                        ax.set_axis_off()
                        fig.add_axes(ax)

                        ## draw point cloud
                        x, y = map.projtran(Zlon, Zlat)
                        map.scatter(x.flatten(),
                                    y.flatten(),
                                    1,
                                    rough.flatten(),
                                    linewidth=0,
                                    vmin=0,
                                    vmax=8)

                        custom_save(sonpath, 'Rough' + str(p))
                        del fig

                        kml = simplekml.Kml()
                        ground = kml.newgroundoverlay(name='GroundOverlay')
                        ground.icon.href = 'Rough' + str(p) + '.png'
                        # LatLonBox bounds: north must be the larger latitude
                        ground.latlonbox.north = np.max(Zlat) + 0.001
                        ground.latlonbox.south = np.min(Zlat) - 0.001
                        ground.latlonbox.east = np.max(Zlon) + 0.001
                        ground.latlonbox.west = np.min(Zlon) - 0.001
                        ground.latlonbox.rotation = 0

                        #kml.save(sonpath+'Rough'+str(p)+'.kml')
                        kml.save(
                            os.path.normpath(
                                os.path.join(sonpath,
                                             'Rough' + str(p) + '.kml')))

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.001,
                            llcrnrlat=np.min(Zlat) - 0.001,
                            urcrnrlon=np.max(Zlon) + 0.001,
                            urcrnrlat=np.max(Zlat) + 0.001)

                        ax = plt.Axes(
                            fig,
                            [0., 0., 1., 1.],
                        )
                        ax.set_axis_off()
                        fig.add_axes(ax)

                        ## draw point cloud
                        x, y = map.projtran(Zlon, Zlat)
                        map.scatter(x.flatten(),
                                    y.flatten(),
                                    1,
                                    hard.flatten(),
                                    linewidth=0,
                                    vmin=0,
                                    vmax=8)

                        custom_save(sonpath, 'Hard' + str(p))
                        del fig

                        kml = simplekml.Kml()
                        ground = kml.newgroundoverlay(name='GroundOverlay')
                        ground.icon.href = 'Hard' + str(p) + '.png'
                        ground.latlonbox.north = np.max(Zlat) + 0.001
                        ground.latlonbox.south = np.min(Zlat) - 0.001
                        ground.latlonbox.east = np.max(Zlon) + 0.001
                        ground.latlonbox.west = np.min(Zlon) - 0.001
                        ground.latlonbox.rotation = 0

                        #kml.save(sonpath+'Hard'+str(p)+'.kml')
                        kml.save(
                            os.path.normpath(
                                os.path.join(sonpath,
                                             'Hard' + str(p) + '.kml')))

                    except:
                        print("plot could not be produced")

        else:
            if 2 > 1:  # need to tidy all this up later!!
                #make an index of every record (ping)
                ind = range(0, np.shape(dwnhi_fp)[1])

                Zdepi = depi
                Zabsorp = absorption
                Zlat = lati
                Zlon = loni
                Zes = ei
                Zns = ni

                try:  #parallel processing with all available cores
                    w = Parallel(n_jobs=-1, verbose=0)(delayed(
                        get_rgh_hrd)(dwnhi_fp[:, i], Zdepi[i], Zabsorp[i], c,
                                     nf, transfreq, equivbeam, maxW, pi, ft)
                                                       for i in ind)
                except:  #fall back to serial
                    w = Parallel(n_jobs=1, verbose=0)(delayed(
                        get_rgh_hrd)(dwnhi_fp[:, i], Zdepi[i], Zabsorp[i], c,
                                     nf, transfreq, equivbeam, maxW, pi, ft)
                                                      for i in ind)

                rough, hard, sv_e1, sv_e2, e1a, e1b, e2a, e2b = zip(*w)

                rough = np.array(rough, 'float')
                rough[rough == 0.0] = np.nan

                hard = np.array(hard, 'float')
                hard[hard == 0.0] = np.nan

                sv_e1 = np.array(sv_e1, 'float')
                sv_e1[sv_e1 == 0.0] = np.nan

                sv_e2 = np.array(sv_e2, 'float')
                sv_e2[sv_e2 == 0.0] = np.nan

                try:
                    nans, y = humutils.nan_helper(rough)
                    rough[nans] = np.interp(y(nans), y(~nans), rough[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(hard)
                    hard[nans] = np.interp(y(nans), y(~nans), hard[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(sv_e1)
                    sv_e1[nans] = np.interp(y(nans), y(~nans), sv_e1[~nans])
                except:
                    pass

                try:
                    nans, y = humutils.nan_helper(sv_e2)
                    sv_e2[nans] = np.interp(y(nans), y(~nans), sv_e2[~nans])
                except:
                    pass

                data = np.column_stack([sv_e1, sv_e2])
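                # cluster the (sv_e1, sv_e2) pairs into 'numclusters' acoustic classes, as above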
                k_means = MiniBatchKMeans(numclusters)
                # fit the model
                k_means.fit(data)
                values = k_means.cluster_centers_.squeeze()
                labels = k_means.labels_

                hardav = humutils.runningMeanFast(hard, integ)
                roughav = humutils.runningMeanFast(rough, integ)

                #f = open(sonpath+base+'rough_and_hard'+str(p)+'.csv', 'wt')
                f = open(
                    os.path.normpath(
                        os.path.join(sonpath, base + 'rough_and_hard' +
                                     str(0) + '.csv')), 'wt')
                writer = csv.writer(f)
                writer.writerow(
                    ('longitude', 'latitude', 'easting', 'northing', 'depth',
                     'roughness', 'hardness', 'average roughness',
                     'average hardness', 'k-mean label'))
                for i in range(0, len(rough)):
                    writer.writerow(
                        (float(Zlon[i]), float(Zlat[i]), float(Zes[i]),
                         float(Zns[i]), float(Zdepi[i]), float(rough[i]),
                         float(hard[i]), float(roughav[i]), float(hardav[i]),
                         labels[i].astype(int)))
                f.close()

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        plt.imshow(dwnhi_fp, cmap='gray')
                        plt.plot(e1a, 'r')
                        plt.plot(e1b, 'y')
                        plt.plot(e2a, 'c')
                        plt.plot(e2b, 'm')
                        plt.axis('tight')
                        #plt.show()
                        custom_save(sonpath, 'e1e2_scan' + str(0))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        fig.subplots_adjust(wspace=0.4, hspace=0.4)
                        plt.subplot(221)
                        plt.plot(sv_e1[labels == 0], sv_e2[labels == 0], 'ko')
                        plt.plot(sv_e1[labels == 1], sv_e2[labels == 1], 'ro')
                        plt.plot(sv_e1[labels == 2], sv_e2[labels == 2], 'bo')
                        plt.xlabel('SV1')
                        plt.ylabel('SV2')
                        plt.xlim(0, 1)
                        plt.ylim(0, 1)

                        plt.subplot(222)
                        plt.plot(rough[labels == 0], hard[labels == 0], 'ko')
                        plt.plot(rough[labels == 1], hard[labels == 1], 'ro')
                        plt.plot(rough[labels == 2], hard[labels == 2], 'bo')
                        plt.xlabel('E1')
                        plt.ylabel('E2')
                        plt.xlim(1, 8)
                        plt.ylim(1, 8)
                        #plt.show()
                        custom_save(sonpath, 'e1e2_kmeans' + str(0))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        fig = plt.figure()
                        s = plt.scatter(Zes[labels == 0],
                                        Zns[labels == 0],
                                        marker='o',
                                        c='k',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        s = plt.scatter(Zes[labels == 1],
                                        Zns[labels == 1],
                                        marker='o',
                                        c='r',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        s = plt.scatter(Zes[labels == 2],
                                        Zns[labels == 2],
                                        marker='o',
                                        c='b',
                                        s=10,
                                        linewidth=0,
                                        vmin=0,
                                        vmax=8)
                        custom_save(sonpath, 'rgh_hard_kmeans' + str(0))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:

                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        #fig.subplots_adjust(wspace = 0.4, hspace=0.4)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #epsg=26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.0001,
                            llcrnrlat=np.min(Zlat) - 0.0001,
                            urcrnrlon=np.max(Zlon) + 0.0001,
                            urcrnrlat=np.max(Zlat) + 0.0001)

                        # draw point cloud
                        x, y = map.projtran(Zlon, Zlat)

                        cs = map.scatter(x.flatten(),
                                         y.flatten(),
                                         1,
                                         rough.flatten(),
                                         linewidth=0,
                                         vmin=0,
                                         vmax=8)

                        try:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='ESRI_Imagery_World_2D',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)
                        except:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='World_Imagery',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)

                        cbar = map.colorbar(cs, location='bottom', pad="5%")
                        cbar.set_label('E1')
                        cbar.set_ticks([0, 2, 4, 6, 8])

                        custom_save(sonpath, 'map_rgh' + str(0))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        fig = plt.figure()
                        #fig.subplots_adjust(wspace = 0.4, hspace=0.4)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.0001,
                            llcrnrlat=np.min(Zlat) - 0.0001,
                            urcrnrlon=np.max(Zlon) + 0.0001,
                            urcrnrlat=np.max(Zlat) + 0.0001)

                        # draw point cloud
                        x, y = map.projtran(Zlon, Zlat)

                        cs = map.scatter(x.flatten(),
                                         y.flatten(),
                                         1,
                                         hard.flatten(),
                                         linewidth=0,
                                         vmin=0,
                                         vmax=8)

                        try:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='ESRI_Imagery_World_2D',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)
                        except:
                            map.arcgisimage(
                                server='http://server.arcgisonline.com/ArcGIS',
                                service='World_Imagery',
                                xpixels=1000,
                                ypixels=None,
                                dpi=300)

                        cbar = map.colorbar(cs, location='bottom', pad="5%")
                        cbar.set_label('E2')
                        cbar.set_ticks([0, 2, 4, 6, 8])

                        custom_save(sonpath, 'map_hard' + str(0))
                        del fig

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.001,
                            llcrnrlat=np.min(Zlat) - 0.001,
                            urcrnrlon=np.max(Zlon) + 0.001,
                            urcrnrlat=np.max(Zlat) + 0.001)

                        ax = plt.Axes(
                            fig,
                            [0., 0., 1., 1.],
                        )
                        ax.set_axis_off()
                        fig.add_axes(ax)

                        ## draw point cloud
                        x, y = map.projtran(Zlon, Zlat)
                        map.scatter(x.flatten(),
                                    y.flatten(),
                                    1,
                                    rough.flatten(),
                                    linewidth=0,
                                    vmin=0,
                                    vmax=8)

                        custom_save(sonpath, 'Rough' + str(0))
                        del fig

                        kml = simplekml.Kml()
                        ground = kml.newgroundoverlay(name='GroundOverlay')
                        ground.icon.href = 'Rough' + str(0) + '.png'
                        ground.latlonbox.north = np.max(Zlat) + 0.001
                        ground.latlonbox.south = np.min(Zlat) - 0.001
                        ground.latlonbox.east = np.max(Zlon) + 0.001
                        ground.latlonbox.west = np.min(Zlon) - 0.001
                        ground.latlonbox.rotation = 0

                        #kml.save(sonpath+'Rough'+str(p)+'.kml')
                        kml.save(
                            os.path.normpath(
                                os.path.join(sonpath,
                                             'Rough' + str(0) + '.kml')))

                    except:
                        print("plot could not be produced")

                if doplot == 1:
                    try:
                        print("drawing and printing map ...")
                        fig = plt.figure(frameon=False)
                        map = Basemap(
                            projection='merc',
                            epsg=cs2cs_args.split(':')[1],  #26949,
                            resolution='i',  #h #f
                            llcrnrlon=np.min(Zlon) - 0.001,
                            llcrnrlat=np.min(Zlat) - 0.001,
                            urcrnrlon=np.max(Zlon) + 0.001,
                            urcrnrlat=np.max(Zlat) + 0.001)

                        ax = plt.Axes(
                            fig,
                            [0., 0., 1., 1.],
                        )
                        ax.set_axis_off()
                        fig.add_axes(ax)

                        ## draw point cloud
                        x, y = map.projtran(Zlon, Zlat)
                        map.scatter(x.flatten(),
                                    y.flatten(),
                                    1,
                                    hard.flatten(),
                                    linewidth=0,
                                    vmin=0,
                                    vmax=8)

                        custom_save(sonpath, 'Hard' + str(0))
                        del fig

                        kml = simplekml.Kml()
                        ground = kml.newgroundoverlay(name='GroundOverlay')
                        ground.icon.href = 'Hard' + str(0) + '.png'
                        ground.latlonbox.north = np.max(Zlat) + 0.001
                        ground.latlonbox.south = np.min(Zlat) - 0.001
                        ground.latlonbox.east = np.max(Zlon) + 0.001
                        ground.latlonbox.west = np.min(Zlon) - 0.001
                        ground.latlonbox.rotation = 0

                        #kml.save(sonpath+'Hard'+str(p)+'.kml')
                        kml.save(
                            os.path.normpath(
                                os.path.join(sonpath,
                                             'Hard' + str(0) + '.kml')))

                    except:
                        print("plot could not be produced")

    else:
        print("high-frequency downward echosounder data not available")
Code example #2
def texture2(humfile, sonpath, win, doplot, numclasses):
    '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015)
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, win, doplot, numclasses)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      win : int, *optional* [Default=10]
       size, in pixels, of the moving window
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of k-means classes that the texture lengthscale will be segmented into

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
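
      Example
      -------
      A minimal usage sketch: file paths are hypothetical, the call follows the Syntax line
      above, and the remaining arguments are the documented defaults.

      import PyHum
      PyHum.texture('test.DAT', 'sonfiles/', 10, 1, 4)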
      '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if win:
        win = np.asarray(win, int)
        print('Window is %s square pixels' % (str(win)))

    if numclasses:
        numclasses = np.asarray(numclasses, int)
        print('Number of sediment classes: %s' % (str(numclasses)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    print('[Default] Number of processors is %s' % (str(cpu_count())))

    ########################################################
    ########################################################

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    ft = 1 / loadmat(sonpath + base + 'meta.mat')['pix_m']
    #pix_m = np.squeeze(meta['pix_m'])
    #dep_m = np.squeeze(meta['dep_m'])
    dist_m = np.squeeze(meta['dist_m'])

    ### port
    print("processing port side ...")
    # load memory mapped scan ... port
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

        port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat',
                                    'float32', tuple(shape_port))

    ### star
    print("processing starboard side ...")
    # load memory mapped scan ... star
    shape_star = np.squeeze(loadmat(sonpath + base + 'meta.mat')['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

        star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat',
                                    'float32', tuple(shape_star))

    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
    else:
        shape = []
        shape.append(1)
        shape.append(shape_port[0])
        shape.append(shape_port[1])
        shape[1] = shape_port[0] + shape_star[0]

    # create memory mapped file for Sp
    #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
    #   fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))
    fp = np.zeros(tuple(shape), dtype='float32')

    if len(shape_star) > 2:

        for p in range(len(port_fp)):

            merge = np.vstack((np.flipud(port_fp[p]), star_fp[p]))
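            # denoise the merged port/starboard scan (total-variation), then compute a
            # windowed intensity statistic (std_convoluted) over a 'win'-pixel moving window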
            merge = denoise_tv_chambolle(merge.copy(),
                                         weight=2,
                                         multichannel=False).astype('float32')
            Snn = std_convoluted(merge, win)[1]
            del merge

            try:
                Snn = medfilt2d(Snn, (win + 1, win + 1))
            except:
                Snn = medfilt2d(Snn, (win, win))

            Snn[np.isnan(np.vstack(
                (np.flipud(port_fp[p]), star_fp[p])))] = np.nan
            Snn[np.isnan(np.vstack(
                (np.flipud(port_fp2[p]), star_fp2[p])))] = np.nan

            R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat',
                                    'float32', tuple(shape_star))

            R = np.vstack((np.flipud(R_fp[0]), R_fp[0]))

            R = R / np.max(R)

            rn = replace_nans.RN(R.astype('float64'), 1000, 0.01, 2,
                                 'localmean')
            R = rn.getdata()
            del rn

            Sp = (Snn**2) * np.cos(np.deg2rad(R)) / win  ##**2

            fp[p] = Sp.astype('float32')
            del Sp

        #del fp # flush data to file
        shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32',
                                 np.squeeze(fp))
        del fp
        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    else:

        merge = np.vstack((np.flipud(port_fp), star_fp))
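        # as above: denoise the merged scan, then the windowed intensity statistic over a 'win'-pixel window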
        merge = denoise_tv_chambolle(merge.copy(),
                                     weight=2,
                                     multichannel=False).astype('float32')
        Snn = std_convoluted(merge, win)[1]
        del merge

        try:
            Snn = medfilt2d(Snn, (win + 1, win + 1))
        except:
            Snn = medfilt2d(Snn, (win, win))

        Snn[np.isnan(np.vstack((np.flipud(port_fp), star_fp)))] = np.nan
        Snn[np.isnan(np.vstack((np.flipud(port_fp2), star_fp2)))] = np.nan

        R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                                tuple(shape_star))

        R = np.vstack((np.flipud(R_fp), R_fp))
        R = R / np.max(R)

        rn = replace_nans.RN(R.astype('float64'), 1000, 0.01, 2, 'localmean')
        R = rn.getdata()
        del rn

        Sp = (Snn**2) * np.cos(np.deg2rad(R)) / win  ##**2

        shape = io.set_mmap_data(sonpath, base, '_data_class.dat', 'float32',
                                 np.squeeze(Sp))

        #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'w+') as ff:
        #   np.save(ff, np.squeeze(Sp).astype('float32'))

        #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
        #   class_fp = np.load(ff)

        #del Sp
        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    dist_m = np.squeeze(loadmat(sonpath + base + 'meta.mat')['dist_m'])

    ########################################################
    if doplot == 1:
        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_class(dist_m, shape_port, port_fp[p], star_fp[p],
                           class_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                       humfile, sonpath, base, 0)

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_contours(dist_m, shape_port, port_fp[p], star_fp[p],
                              class_fp[p], ft, humfile, sonpath, base,
                              numclasses, p)
        else:
            plot_contours(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                          humfile, sonpath, base, numclasses, 0)

    #######################################################
    # k-means

    if len(shape_star) > 2:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

        for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

        del fp

        kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat',
                                     'float32', tuple(shape))

    else:
        wc = get_kclass(class_fp.copy(), numclasses)

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(wc).astype('float32'))

        del wc

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'r') as ff:
            kclass_fp = np.load(ff)

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p],
                            kclass_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft,
                        humfile, sonpath, base, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + " seconds to analyse")

    print("Done!")
    print("===================================================")
Code example #3
def map(humfile, sonpath, cs2cs_args, res, mode, nn, numstdevs,
        use_uncorrected, scalemax):  #dogrid = 1, influence = 1, dowrite = 0,
    '''
    Create plots of the spatially referenced sidescan echograms

    Syntax
    ----------
    [] = PyHum.map(humfile, sonpath, cs2cs_args, res, mode, nn, numstdevs)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : str, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    res : float, *optional* [Default=0]
       grid resolution of output gridded texture map
       if res=0, res will be determined automatically from the spatial resolution of 1 pixel
    mode: int, *optional* [Default=3]
       gridding mode. 1 = nearest neighbour
                      2 = inverse weighted nearest neighbour
                      3 = Gaussian weighted nearest neighbour
    nn: int, *optional* [Default=64]
       number of nearest neighbours for gridding (used if mode > 1)
    numstdevs: int, *optional* [Default = 4]
       Threshold number of standard deviations in sidescan intensity per grid cell up to which to accept


    Returns
    -------
    sonpath+'x_y_ss_raw'+str(p)+'.asc'  : text file
        contains the point cloud of easting, northing, and sidescan intensity
        of the pth chunk

    sonpath+'GroundOverlay'+str(p)+'.kml': kml file
        contains gridded (or point cloud) sidescan intensity map for importing into google earth
        of the pth chunk

    sonpath+'map'+str(p)+'.png' :
        image overlay associated with the kml file
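
    Example
    -------
    A minimal usage sketch: file paths are hypothetical, the call follows the Syntax line
    above, and the remaining arguments are the documented defaults.

    import PyHum
    PyHum.map('test.DAT', 'sonfiles/', 'epsg:26949', 0, 3, 64, 4)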

    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))

    if res:
        res = np.asarray(res, float)
        print('Gridding resolution: %s' % (str(res)))

    if mode:
        mode = int(mode)
        print('Mode for gridding: %s' % (str(mode)))

    if nn:
        nn = int(nn)
        print('Number of nearest neighbours for gridding: %s' % (str(nn)))

    if numstdevs:
        numstdevs = int(numstdevs)
        print(
            'Threshold number of standard deviations in sidescan intensity per grid cell up to which to accept: %s'
            % (str(numstdevs)))

    if use_uncorrected:
        use_uncorrected = int(use_uncorrected)
        if use_uncorrected == 1:
            print("Radiometrically uncorrected scans will be used")

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    #trans =  pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    esi = np.squeeze(meta['e'])
    nsi = np.squeeze(meta['n'])

    theta = np.squeeze(meta['heading']) / (180 / np.pi)

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    shape_star = np.squeeze(meta['shape_star'])

    if use_uncorrected == 1:
        print("using uncorrected scans")
        if shape_port != '':
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_l.dat',
                                       'float32', tuple(shape_port))
        if shape_star != '':
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_l.dat',
                                       'float32', tuple(shape_star))

    else:
        if shape_port != '':
            if os.path.isfile(
                    os.path.normpath(
                        os.path.join(sonpath, base + '_data_port_lar.dat'))):
                port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                           'float32', tuple(shape_port))
            else:
                port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                           'float32', tuple(shape_port))

        if shape_star != '':
            if os.path.isfile(
                    os.path.normpath(
                        os.path.join(sonpath, base + '_data_star_lar.dat'))):
                star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                           'float32', tuple(shape_star))
            else:
                star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                           'float32', tuple(shape_star))

    # time varying gain
    tvg = ((8.5 * 10**-5) + (3 / 76923) + ((8.5 * 10**-5) / 4)) * meta['c']

    # depth correction
    dist_tvg = np.squeeze((
        (np.tan(np.radians(25))) * np.squeeze(meta['dep_m'])) - (tvg))

    # read in range data
    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            tuple(shape_star))

    # for debugging/testing
    #    p=0
    #    e = esi[shape_port[-1]*p:shape_port[-1]*(p+1)]
    #    n = nsi[shape_port[-1]*p:shape_port[-1]*(p+1)]
    #    t = theta[shape_port[-1]*p:shape_port[-1]*(p+1)]
    #    d = dist_tvg[shape_port[-1]*p:shape_port[-1]*(p+1)]
    #    dat_port = port_fp[p]
    #    dat_star = star_fp[p]
    #    data_R = R_fp[p]
    #    dx=np.arcsin(meta['c']/(1000*meta['t']*meta['f']))

    #    e = esi;# del esi
    #    n = nsi; #del nsi
    #    t = theta;# del theta
    #    d = dist_tvg;# del dist_tvg
    #    dat_port = port_fp;# del port_fp
    #    dat_star = star_fp; #del star_fp
    #    data_R = R_fp; #del R_fp

    dx = np.arcsin(meta['c'] / (1000 * meta['t'] * meta['f']))
    pix_m = meta['pix_m'] * 1.1
    c = meta['c']

    if res == 0:
        res = 99

    print("Number of chunks for mapping: %s" % (len(star_fp)))

    if len(shape_star) > 2:
        for p in range(len(star_fp)):
            try:
                print("progress: " + str(p) + " / " + str(len(star_fp)))
                res = make_map(
                    esi[shape_port[-1] * p:shape_port[-1] * (p + 1)],
                    nsi[shape_port[-1] * p:shape_port[-1] * (p + 1)],
                    theta[shape_port[-1] * p:shape_port[-1] * (p + 1)],
                    dist_tvg[shape_port[-1] * p:shape_port[-1] * (p + 1)],
                    port_fp[p], star_fp[p], R_fp[p], meta['pix_m'], res,
                    cs2cs_args, sonpath, p, mode, nn, numstdevs, meta['c'],
                    np.arcsin(meta['c'] / (1000 * meta['t'] * meta['f'])),
                    use_uncorrected, scalemax)  #dogrid, influence, dowrite,
                print("grid resolution is %s" % (str(res)))
            except:
                print("error on chunk " + str(p))
    else:
        res = make_map(esi, nsi, theta, dist_tvg, port_fp, star_fp, R_fp,
                       meta['pix_m'], res, cs2cs_args, sonpath, 0, mode, nn,
                       numstdevs, meta['c'],
                       np.arcsin(meta['c'] / (1000 * meta['t'] * meta['f'])),
                       use_uncorrected, scalemax)  #dogrid, influence,dowrite,

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Code example #4
def correct(humfile, sonpath, maxW, doplot, dofilt, correct_withwater, ph,
            temp, salinity, dconcfile):
    '''
    Remove water column and carry out some rudimentary radiometric corrections, 
    accounting for directivity and attenuation with range

    Syntax
    ----------
    [] = PyHum.correct(humfile, sonpath, maxW, doplot, dofilt, correct_withwater, ph, temp, salinity, dconcfile)

    Parameters
    ----------
    humfile : str
       path to the .DAT file

    sonpath : str
       path where the *.SON files are

    maxW : int, *optional* [Default=1000]
       maximum transducer power

    doplot : int, *optional* [Default=1]
       1 = make plots, otherwise do not

    dofilt : int, *optional* [Default=0]
       1 = apply a phase preserving filter to the scans

    correct_withwater : int, *optional* [Default=0]
       1 = apply radiometric correction but don't remove water column from scans

    ph : float, *optional* [Default=7.0]
       water acidity in pH

    temp : float, *optional* [Default=10.0]
       water temperature in degrees Celsius

    salinity : float, *optional* [Default=0.0]
       salinity of water in parts per thousand

    dconcfile : str, *optional* [Default=None]
       file path of a text file containing sediment concentration data
       this file must contain the following fields separated by spaces:
       size (microns) conc (mg/L) dens (kg/m3)
       with one row per grain size, for example:
       30 1700 2200
       100 15 2650

    Returns
    -------
    sonpath+base+'_data_star_l.dat': memory-mapped file
        contains the starboard scan with water column removed

    sonpath+base+'_data_port_l.dat': memory-mapped file
        contains the portside scan with water column removed

    sonpath+base+'_data_star_la.dat': memory-mapped file
        contains the starboard scan with water column removed and 
        radiometrically corrected

    sonpath+base+'_data_port_la.dat': memory-mapped file
        contains the portside scan with water column removed and
        radiometrically corrected

    sonpath+base+'_data_range.dat': memory-mapped file
        contains the cosine of the range which is used to correct
        for attenuation with range

    sonpath+base+'_data_dwnlow_l.dat': memory-mapped file
        contains the low freq. downward scan with water column removed

    sonpath+base+'_data_dwnhi_l.dat': memory-mapped file
        contains the high freq. downward  scan with water column removed

    sonpath+base+'_data_dwnlow_la.dat': memory-mapped file
        contains the low freq. downward  scan with water column removed and 
        radiometrically corrected

    sonpath+base+'_data_dwnhi_la.dat': memory-mapped file
        contains the high freq. downward  scan with water column removed and
        radiometrically corrected
    
    if correct_withwater == 1:
    
       sonpath+base+'_data_star_lw.dat': memory-mapped file
           contains the starboard scan with water column retained and 
           radiometrically corrected

       sonpath+base+'_data_port_lw.dat': memory-mapped file
           contains the portside scan with water column retained and
           radiometrically corrected

    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if maxW:
        maxW = np.asarray(maxW, float)
        print('Max. transducer power is %s W' % (str(maxW)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    if dofilt:
        dofilt = int(dofilt)
        if dofilt == 0:
            print("Phase preserving filter will not be applied")
        else:
            print("Phase preserving filter will be applied")

    if correct_withwater:
        correct_withwater = int(correct_withwater)
        if correct_withwater == 1:
            print("Correction will be applied without removing water column")

    if salinity:
        salinity = np.asarray(salinity, float)
        print('Salinity is %s ppt' % (str(salinity)))

    if ph:
        ph = np.asarray(ph, float)
        print('pH is %s' % (str(ph)))

    if temp:
        temp = np.asarray(temp, float)
        print('Temperature is %s' % (str(temp)))

    if dconcfile is not None:
        try:
            print('Suspended sediment size/conc. file is %s' % (dconcfile))
            dconc = np.genfromtxt(dconcfile).T
            conc = dconc[1]
            dens = dconc[2]
            d = dconc[0]
        except:
            pass

    #================================
    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    # add wattage to metadata dict
    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    dep_m = meta['dep_m'][0]
    pix_m = meta['pix_m'][0]

    meta['maxW'] = maxW
    savemat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')),
            meta,
            oned_as='row')

    bed = np.squeeze(meta['bed'])
    ft = 1 / (meta['pix_m'])
    dist_m = np.squeeze(meta['dist_m'])

    try:
        if dconcfile is not None:
            # sediment attenuation
            alpha = sed_atten(meta['f'], conc, dens, d, meta['c'])
        else:
            alpha = 0
    except:
        alpha = 0

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port2.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat',
                                       'int16', tuple(shape_port))

        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat',
                                       'int16', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star2.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat',
                                       'int16', tuple(shape_star))

        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat',
                                       'int16', tuple(shape_star))

    if len(shape_star) == 2:
        extent = shape_star[0]
    else:
        extent = shape_star[1]  #np.shape(data_port)[0]

    bed = np.asarray(bed, 'int') + int(0.25 * ft)

    # calculate in dB
    ######### star
    Zt, R, A = remove_water(star_fp, bed, shape_star, dep_m, pix_m, 1, maxW)

    Zt = np.squeeze(Zt)

    # create memory mapped file for Z
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_l.dat', 'float32',
                                  Zt)
    del Zt

    A = np.squeeze(A)
    # create memory mapped file for A
    shape_A = io.set_mmap_data(sonpath, base, '_data_incidentangle.dat',
                               'float32', A)
    del A

    R = np.squeeze(R)
    R[np.isnan(R)] = 0

    try:
        alpha_w = water_atten(R, meta['f'], meta['c'], ph, temp, salinity)
    except:
        alpha_w = 1e-5

    # compute transmission losses
    TL = (40 * np.log10(R) + alpha_w + (2 * alpha) * R / 1000) / 255
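    # the loss term combines two-way spherical spreading (40*log10(R)), the
    # water absorption returned by water_atten (alpha_w), and a range-scaled
    # sediment attenuation term (2*alpha*R/1000); dividing by 255 appears to
    # rescale the loss onto the 0-255 intensity scale of the raw scans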
    del alpha_w

    # create memory mapped file for R
    shape_R = io.set_mmap_data(sonpath, base, '_data_range.dat', 'float32', R)
    del R

    TL[np.isnan(TL)] = 0
    TL[TL < 0] = 0
    shape_TL = io.set_mmap_data(sonpath, base, '_data_TL.dat', 'float32', TL)
    del TL

    A_fp = io.get_mmap_data(sonpath, base, '_data_incidentangle.dat',
                            'float32', shape_star)
    TL_fp = io.get_mmap_data(sonpath, base, '_data_TL.dat', 'float32',
                             shape_star)

    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            shape_star)

    if correct_withwater == 1:
        Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)

        # create memory mapped file for Z
        shape_star = io.set_mmap_data(sonpath, base, '_data_star_lw.dat',
                                      'float32', Zt)

    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32',
                               shape_star)

    ##Zt = correct_scans(star_fp, A_fp, TL_fp, dofilt)

    #phi=1.69
    alpha = 59  # vertical beam width at 3db
    theta = 35  #opening angle theta

    # lambertian correction
    Zt = correct_scans_lambertian(star_fp, A_fp, TL_fp, R_fp, meta['c'],
                                  meta['f'], theta, alpha)

    Zt = np.squeeze(Zt)

    avg = np.nanmedian(Zt, axis=0)
    ##avg = median_filter(avg,int(len(avg)/10))

    Zt2 = Zt - avg + np.nanmean(avg)
    Zt2 = Zt2 + np.abs(np.nanmin(Zt2))

    try:
        Zt2 = median_filter(Zt2, (3, 3))
    except:
        pass

    ##Zt2 = np.empty(np.shape(Zt))
    ##for kk in range(np.shape(Zt)[1]):
    ##   Zt2[:,kk] = (Zt[:,kk] - avg) + np.nanmean(avg)
    ##Zt2[Zt<=0] = np.nan
    ##Zt2[Zt2<=0] = np.nan
    del Zt

    # create memory mapped file for Z
    shape_star = io.set_mmap_data(sonpath, base, '_data_star_la.dat',
                                  'float32', Zt2)
    del Zt2

    #we are only going to access the portion of memory required
    star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32',
                               shape_star)

    ######### port
    if correct_withwater == 1:
        Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)

        # create memory mapped file for Z
        shape_port = io.set_mmap_data(sonpath, base, '_data_port_lw.dat',
                                      'float32', Zt)

    Zt = remove_water(port_fp, bed, shape_port, dep_m, pix_m, 0, maxW)

    Zt = np.squeeze(Zt)

    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_l.dat', 'float32',
                                  Zt)

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32',
                               shape_port)

    ##Zt = correct_scans(port_fp, A_fp, TL_fp, dofilt)

    # lambertian correction
    Zt = correct_scans_lambertian(port_fp, A_fp, TL_fp, R_fp, meta['c'],
                                  meta['f'], theta, alpha)

    Zt = np.squeeze(Zt)

    Zt2 = Zt - avg + np.nanmean(avg)
    Zt2 = Zt2 + np.abs(np.nanmin(Zt2))

    ##Zt2 = np.empty(np.shape(Zt))
    ##for kk in range(np.shape(Zt)[1]):
    ##   Zt2[:,kk] = (Zt[:,kk] - avg) + np.nanmean(avg)
    ##Zt2[Zt<=0] = np.nan
    ##Zt2[Zt2<=0] = np.nan
    del Zt

    # create memory mapped file for Z
    shape_port = io.set_mmap_data(sonpath, base, '_data_port_la.dat',
                                  'float32', Zt2)
    del Zt2

    #we are only going to access the portion of memory required
    port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32',
                               shape_port)

    ## do plots of merged scans
    if doplot == 1:
        if correct_withwater == 1:

            port_fpw = io.get_mmap_data(sonpath, base, '_data_port_lw.dat',
                                        'float32', shape_port)

            star_fpw = io.get_mmap_data(sonpath, base, '_data_star_lw.dat',
                                        'float32', shape_star)

            if len(np.shape(star_fpw)) > 2:
                for p in range(len(star_fpw)):
                    plot_merged_scans(port_fpw[p], star_fpw[p], dist_m,
                                      shape_port, ft, sonpath, p)
            else:
                plot_merged_scans(port_fpw, star_fpw, dist_m, shape_port, ft,
                                  sonpath, 0)

        else:

            if len(np.shape(star_fp)) > 2:
                for p in range(len(star_fp)):
                    plot_merged_scans(port_fp[p], star_fp[p], dist_m,
                                      shape_port, ft, sonpath, p)
            else:
                plot_merged_scans(port_fp, star_fp, dist_m, shape_port, ft,
                                  sonpath, 0)

    # load memory mapped scans
    shape_low = np.squeeze(meta['shape_low'])
    shape_hi = np.squeeze(meta['shape_hi'])

    if shape_low != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_dwnlow2.dat'))):
            try:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat',
                                          'int16', tuple(shape_low))

            except:
                try:
                    low_fp = io.get_mmap_data(sonpath, base,
                                              '_data_dwnlow.dat', 'int16',
                                              tuple(shape_low))
                except:
                    low_fp = io.get_mmap_data(sonpath, base,
                                              '_data_dwnlow.dat', 'int16',
                                              tuple(shape_hi))

                #if 'shape_hi' in locals():
                #   low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow2.dat', 'int16', tuple(shape_hi))

        else:

            try:
                low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                          'int16', tuple(shape_low))

            except:
                if 'shape_hi' in locals():
                    low_fp = io.get_mmap_data(sonpath, base,
                                              '_data_dwnlow.dat', 'int16',
                                              tuple(shape_hi))

    shape_hi = np.squeeze(meta['shape_hi'])

    if shape_hi != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_dwnhi2.dat'))):
            try:
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat',
                                         'int16', tuple(shape_hi))

            except:
                try:
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_hi))
                except:
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_low))

                #if 'shape_low' in locals():
                #   hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat', 'int16', tuple(shape_low))

        else:
            try:
                hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                         'int16', tuple(shape_hi))

            except:
                if 'shape_low' in locals():
                    hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat',
                                             'int16', tuple(shape_low))

    if 'low_fp' in locals():
        ######### low
        Zt = remove_water(low_fp, bed, shape_low, dep_m, pix_m, 0, maxW)
        Zt = np.squeeze(Zt)

        # create memory mapped file for Z
        shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_l.dat',
                                     'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_l.dat',
                                  'float32', shape_low)
        Zt = correct_scans2(low_fp, TL_fp)

        # create memory mapped file for Z
        shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow_la.dat',
                                     'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        low_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow_la.dat',
                                  'float32', shape_low)

        if doplot == 1:
            if len(np.shape(low_fp)) > 2:
                for p in range(len(low_fp)):
                    plot_dwnlow_scans(low_fp[p], dist_m, shape_low, ft,
                                      sonpath, p)
            else:
                plot_dwnlow_scans(low_fp, dist_m, shape_low, ft, sonpath, 0)

    if 'hi_fp' in locals():
        ######### hi
        Zt = remove_water(hi_fp, bed, shape_hi, dep_m, pix_m, 0, maxW)
        Zt = np.squeeze(Zt)

        # create memory mapped file for Z
        shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_l.dat',
                                    'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_l.dat', 'float32',
                                 shape_hi)

        Zt = correct_scans2(hi_fp, TL_fp)

        # create memory mapped file for Z
        shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi_la.dat',
                                    'float32', Zt)
        del Zt

        #we are only going to access the portion of memory required
        hi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi_la.dat',
                                 'float32', shape_hi)

        if doplot == 1:
            if len(np.shape(hi_fp)) > 2:
                for p in range(len(hi_fp)):
                    plot_dwnhi_scans(hi_fp[p], dist_m, shape_hi, ft, sonpath,
                                     p)
            else:
                plot_dwnhi_scans(hi_fp, dist_m, shape_hi, ft, sonpath, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Code example #5
def texture_slic(humfile,
                 sonpath,
                 doplot=1,
                 numclasses=4,
                 maxscale=20,
                 notes=4):
    '''
      Create a texture lengthscale map using the algorithm detailed by Buscombe et al. (2015)
      This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
      representation that integrates over many attributes of bed texture, of which grain size is the most important. 
      The technique is a physically based means to identify regions of texture within a sidescan echogram, 
      and could provide a basis for objective, automated riverbed sediment classification.

      Syntax
      ----------
      [] = PyHum.texture(humfile, sonpath, doplot, numclasses, maxscale, notes)

      Parameters
      ----------
      humfile : str
       path to the .DAT file
      sonpath : str
       path where the *.SON files are
      doplot : int, *optional* [Default=1]
       if 1, make plots, otherwise do not make plots
      numclasses : int, *optional* [Default=4]
       number of 'k means' that the texture lengthscale will be segmented into
      maxscale : int, *optional* [Default=20]
       Max scale as inverse fraction of data length for wavelet analysis
      notes : int, *optional* [Default=4]
       notes per octave for wavelet analysis

      Returns
      -------
      sonpath+base+'_data_class.dat': memory-mapped file
        contains the texture lengthscale map

      sonpath+base+'_data_kclass.dat': memory-mapped file
        contains the k-means segmented texture lengthscale map

      References
      ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
      '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if numclasses:
        numclasses = np.asarray(numclasses, int)
        print('Number of sediment classes: %s' % (str(numclasses)))

    if maxscale:
        maxscale = np.asarray(maxscale, int)
        print('Max scale as inverse fraction of data length: %s' %
              (str(maxscale)))

    if notes:
        notes = np.asarray(notes, int)
        print('Notes per octave: %s' % (str(notes)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    print('[Default] Number of processors is %s' % (str(cpu_count())))

    ########################################################
    ########################################################

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    ft = 1 / loadmat(sonpath + base + 'meta.mat')['pix_m']
    #pix_m = np.squeeze(meta['pix_m'])
    #dep_m = np.squeeze(meta['dep_m'])
    dist_m = np.squeeze(meta['dist_m'])

    ### port
    print("processing port side ...")
    # load memory mapped scan ... port
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':

        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

        #port_fp2 = io.get_mmap_data(sonpath, base, '_data_port_l.dat', 'float32', tuple(shape_port))

    ### star
    print("processing starboard side ...")
    # load memory mapped scan ... star
    shape_star = np.squeeze(loadmat(sonpath + base + 'meta.mat')['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

        #star_fp2 = io.get_mmap_data(sonpath, base, '_data_star_l.dat', 'float32', tuple(shape_star))

    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
    else:
        shape = []
        shape.append(1)
        shape.append(shape_port[0])
        shape.append(shape_port[1])
        shape[1] = shape_port[0] + shape_star[0]

    #work on the entire scan
    #im = humutils.rescale(np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp))),0,1)
    im = np.vstack((np.flipud(np.hstack(port_fp)), np.hstack(star_fp)))
    im[np.isnan(im)] = 0
    im = humutils.rescale(im, 0, 1)

    #get SLIC superpixels
    segments_slic = slic(im, n_segments=int(im.shape[0] / 10), compactness=.1)

    #pre-allocate texture lengthscale array
    tl = np.zeros(im.shape, dtype="float64")

    #cycle through each segment and compute tl
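    # (each superpixel is cropped out of the merged echogram with crop_toseg and a
    #  single lengthscale is estimated for it by parallel_me, using the wavelet
    #  maxscale/notes settings above, then written back into tl for every pixel
    #  of that segment)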
    for k in np.unique(segments_slic):
        mask = np.zeros(im.shape[:2], dtype="uint8")
        mask[segments_slic == k] = 255
        cmask, cim = crop_toseg(mask, im)
        tl[segments_slic == k] = parallel_me(cim, maxscale, notes,
                                             np.shape(cim)[0])

    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32',
                            tuple(shape_star))
    R = np.vstack((np.flipud(np.hstack(R_fp)), np.hstack(R_fp)))
    R = R / np.max(R)

    #correct for range and scale
    tl = tl * np.cos(R) * (1 / ft)
    tl[im == 0] = np.nan
    tl[np.isnan(im)] = np.nan

    # create memory mapped file for Sp
    with open(
            os.path.normpath(os.path.join(sonpath, base + '_data_class.dat')),
            'w+') as ff:
        fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

    counter = 0
    if len(shape_star) > 2:
        for p in range(len(port_fp)):
            n, m = np.shape(np.vstack((np.flipud(port_fp[p]), star_fp[p])))
            Sp = tl[:n, counter:counter + m]
            counter = counter + m
            fp[p] = Sp.astype('float32')
            del Sp
        del fp  # flush data to file

        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))

    else:

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_class.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(tl).astype('float32'))

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_class.dat')),
                'r') as ff:
            class_fp = np.load(ff)

    dist_m = np.squeeze(loadmat(sonpath + base + 'meta.mat')['dist_m'])

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_class(dist_m, shape_port, port_fp[p], star_fp[p],
                           class_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_class(dist_m, shape_port, port_fp, star_fp, class_fp, ft,
                       humfile, sonpath, base, 0)

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_contours(dist_m, shape_port, class_fp[p], ft, humfile,
                              sonpath, base, numclasses, p)
        else:
            plot_contours(dist_m, shape_port, class_fp, ft, humfile, sonpath,
                          base, numclasses, 0)

    #######################################################
    # k-means

    if len(shape_star) > 2:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=tuple(shape))

        for p in range(len(port_fp)):
            wc = get_kclass(class_fp[p].copy(), numclasses)
            fp[p] = wc.astype('float32')
            del wc

        del fp

        kclass_fp = io.get_mmap_data(sonpath, base, '_data_kclass.dat',
                                     'float32', tuple(shape))

    else:
        wc = get_kclass(class_fp.copy(), numclasses)

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'w+') as ff:
            np.save(ff, np.squeeze(wc).astype('float32'))

        del wc

        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_kclass.dat')),
                'r') as ff:
            kclass_fp = np.load(ff)

    ########################################################
    if doplot == 1:

        if len(shape_star) > 2:
            for p in range(len(star_fp)):
                plot_kmeans(dist_m, shape_port, port_fp[p], star_fp[p],
                            kclass_fp[p], ft, humfile, sonpath, base, p)
        else:
            plot_kmeans(dist_m, shape_port, port_fp, star_fp, kclass_fp, ft,
                        humfile, sonpath, base, 0)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
Code example #6
def rmshadows(humfile, sonpath, win, shadowmask, doplot, dissim, correl,
              contrast, energy, mn):
    '''
    Remove dark shadows in scans caused by shallows, shorelines, and attenuation of acoustics with distance
    Manual or automated processing options available
    Works on the radiometrically corrected outputs of the correct module

    Syntax
    ----------
    [] = PyHum.rmshadows(humfile, sonpath, win, shadowmask, doplot, dissim, correl, contrast, energy, mn)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    win : int, *optional* [Default=100]
       window size (pixels) for the automated shadow removal algorithm
    shadowmask : int, *optional* [Default=0]
       1 = do manual shadow masking, otherwise do automatic shadow masking
    doplot : int, *optional* [Default=1]
       1 = make plots, otherwise do not
    dissim : int, *optional*
       threshold dissimilarity (shadow is <) for the automated algorithm
    correl : int, *optional*
       threshold correlation (shadow is <) for the automated algorithm
    contrast : int, *optional*
       threshold contrast (shadow is <) for the automated algorithm
    energy : int, *optional*
       threshold energy (shadow is >) for the automated algorithm
    mn : int, *optional*
       threshold mean intensity (shadow is <) for the automated algorithm

    Returns
    -------
    sonpath+base+'_data_star_la.dat': memory-mapped file
        contains the starboard scan with water column removed and 
        radiometrically corrected, and shadows removed

    sonpath+base+'_data_port_la.dat': memory-mapped file
        contains the portside scan with water column removed and
        radiometrically corrected, and shadows removed

    '''

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw()  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if win:
        win = np.asarray(win, int)
        print('Window is %s square pixels' % (str(win)))

    if shadowmask:
        shadowmask = np.asarray(shadowmask, int)
        if shadowmask == 1:
            print('Shadow masking is manual')
        else:
            print('Shadow masking is auto')

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    if dissim:
        dissim = np.asarray(dissim, int)
        print('Threshold dissimilarity (shadow is <) is %s' % (str(dissim)))

    if correl:
        correl = np.asarray(correl, int)
        print('Threshold correlation (shadow is <) is %s' % (str(correl)))

    if contrast:
        contrast = np.asarray(contrast, int)
        print('Threshold contrast (shadow is <) is %s' % (str(contrast)))

    if energy:
        energy = np.asarray(energy, int)
        print('Threshold energy (shadow is >) is %s' % (str(energy)))

    if mn:
        mn = np.asarray(mn, int)
        print('Threshold mean intensity (shadow is <) is %s' % (str(mn)))

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':
        #port_fp = np.memmap(sonpath+base+'_data_port_la.dat', dtype='float32', mode='r', shape=tuple(shape_port))
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_la.dat')),
                'r') as ff:
            port_fp = np.memmap(ff,
                                dtype='float32',
                                mode='r',
                                shape=tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        #star_fp = np.memmap(sonpath+base+'_data_star_la.dat', dtype='float32', mode='r', shape=tuple(shape_star))
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_la.dat')),
                'r') as ff:
            star_fp = np.memmap(ff,
                                dtype='float32',
                                mode='r',
                                shape=tuple(shape_star))

    dist_m = np.squeeze(meta['dist_m'])
    ft = 1 / (meta['pix_m'])
    extent = shape_star[1]

    if shadowmask == 1:  #manual

        Zt = []
        if len(np.shape(star_fp)) > 2:
            for p in range(len(star_fp)):
                raw_input(
                    "Shore picking " + str(p + 1) + " of " +
                    str(len(star_fp)) +
                    " (starboard), are you ready? 60 seconds. Press Enter to continue..."
                )
                shoreline_star = {}
                fig = plt.figure()
                ax = plt.gca()
                ax.imshow(star_fp[p], cmap='gray')  #, origin = 'upper') #im =
                plt.axis('normal')
                plt.axis('tight')
                pts1 = plt.ginput(
                    n=300,
                    timeout=75)  # it will wait for 300 clicks or 75 seconds
                x1 = map(lambda x: x[0],
                         pts1)  # map applies the function passed as
                y1 = map(lambda x: x[1],
                         pts1)  # first parameter to each element of pts
                shoreline_star = np.interp(np.r_[:np.shape(star_fp[p])[1]], x1,
                                           y1)
                plt.close()
                del fig

                star_mg = star_fp[p].copy()

                shoreline_star = np.asarray(shoreline_star, 'int')
                # blank out everything beyond the picked shoreline
                for k in range(np.shape(star_mg)[1]):
                    star_mg[shoreline_star[k]:, k] = np.nan

                del shoreline_star

                Zt.append(star_mg)

        else:

            raw_input(
                "Shore picking " + str(len(star_fp)) + " of " +
                str(len(star_fp)) +
                " (starboard), are you ready? 60 seconds. Press Enter to continue..."
            )
            shoreline_star = {}
            fig = plt.figure()
            ax = plt.gca()
            ax.imshow(star_fp, cmap='gray')  #, origin = 'upper') #im =
            plt.axis('normal')
            plt.axis('tight')
            pts1 = plt.ginput(
                n=300, timeout=75)  # it will wait for 300 clicks or 75 seconds
            x1 = map(lambda x: x[0],
                     pts1)  # map applies the function passed as
            y1 = map(lambda x: x[1],
                     pts1)  # first parameter to each element of pts
            shoreline_star = np.interp(np.r_[:np.shape(star_fp)[1]], x1, y1)
            plt.close()
            del fig

            star_mg = star_fp.copy()

            shoreline_star = np.asarray(shoreline_star, 'int')
            # blank out everything beyond the picked shoreline
            for k in range(np.shape(star_mg)[1]):
                star_mg[shoreline_star[k]:, k] = np.nan

            del shoreline_star

            Zt.append(star_mg)

        ## create memory mapped file for Z
        #p = np.memmap(sonpath+base+'_data_star_la.dat', dtype='float32', mode='w+', shape=np.shape(Zt))
        #fp[:] = Zt[:]
        #del fp

        Zt = np.squeeze(Zt)

        # create memory mapped file for Zs
        #fp = np.memmap(sonpath+base+'_data_star_lar.dat', dtype='float32', mode='w+', shape=np.shape(Zs))
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=np.shape(Zt))
        fp[:] = Zt[:]
        del fp
        del Zt

        #shutil.move(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat')), os.path.normpath(os.path.join(sonpath,base+'_data_star_la.dat')))

        Zt = []
        if len(np.shape(star_fp)) > 2:
            for p in range(len(port_fp)):

                raw_input(
                    "Shore picking " + str(p + 1) + " of " +
                    str(len(port_fp)) +
                    " (port), are you ready? 60 seconds. Press Enter to continue..."
                )
                shoreline_port = {}
                fig = plt.figure()
                ax = plt.gca()
                ax.imshow(port_fp[p], cmap='gray')  #, origin = 'upper') #im =
                plt.axis('normal')
                plt.axis('tight')
                pts1 = plt.ginput(
                    n=300,
                    timeout=75)  # it will wait for 300 clicks or 75 seconds
                x1 = map(lambda x: x[0],
                         pts1)  # map applies the function passed as
                y1 = map(lambda x: x[1],
                         pts1)  # first parameter to each element of pts
                shoreline_port = np.interp(np.r_[:np.shape(port_fp[p])[1]], x1,
                                           y1)
                plt.close()
                del fig

                port_mg = port_fp[p].copy()

                shoreline_port = np.asarray(shoreline_port, 'int')
                # blank out everything beyond the picked shoreline
                for k in range(np.shape(port_mg)[1]):
                    port_mg[shoreline_port[k]:, k] = np.nan

                del shoreline_port

                Zt.append(port_mg)

        else:

            raw_input(
                "Shore picking " + str(len(port_fp)) + " of " +
                str(len(port_fp)) +
                " (port), are you ready? 60 seconds. Press Enter to continue..."
            )
            shoreline_port = {}
            fig = plt.figure()
            ax = plt.gca()
            ax.imshow(port_fp, cmap='gray')  #, origin = 'upper') #im =
            plt.axis('normal')
            plt.axis('tight')
            pts1 = plt.ginput(
                n=300, timeout=75)  # it will wait for 300 clicks or 75 seconds
            x1 = map(lambda x: x[0],
                     pts1)  # map applies the function passed as
            y1 = map(lambda x: x[1],
                     pts1)  # first parameter to each element of pts
            shoreline_port = np.interp(np.r_[:np.shape(port_fp)[1]], x1, y1)
            plt.close()
            del fig

            port_mg = port_fp.copy()

            shoreline_port = np.asarray(shoreline_port, 'int')
            # blank out everything beyond the picked shoreline
            for k in range(np.shape(port_mg)[1]):
                port_mg[shoreline_port[k]:, k] = np.nan

            del shoreline_port

            Zt.append(port_mg)

        Zt = np.squeeze(Zt)
        ## create memory mapped file for Z
        #fp = np.memmap(sonpath+base+'_data_port_la.dat', dtype='float32', mode='w+', shape=np.shape(Zt))
        #fp[:] = Zt[:]
        #del fp

        # create memory mapped file for Zp
        #fp = np.memmap(sonpath+base+'_data_port_lar.dat', dtype='float32', mode='w+', shape=np.shape(Zp))
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=np.shape(Zt))
        fp[:] = Zt[:]
        del fp
        del Zt

        #shutil.move(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat')), os.path.normpath(os.path.join(sonpath,base+'_data_port_la.dat')))

    else:  #auto

        Zs = []
        Zp = []
        if len(np.shape(star_fp)) > 2:
            for p in range(len(star_fp)):
                merge = np.vstack((np.flipud(port_fp[p]), star_fp[p]))
                merge = np.asarray(merge, 'float64')

                merge_mask = np.vstack((np.flipud(port_fp[p]), star_fp[p]))

                merge[merge_mask == 0] = 0
                del merge_mask

                mask = np.asarray(merge != 0,
                                  'int8')  # only 8bit precision needed

                merge[np.isnan(merge)] = 0

                #Z,ind = humutils.sliding_window(merge,(win,win),(win/2,win/2))
                Z, ind = humutils.sliding_window(merge, (win, win), (win, win))

                #zmean = np.reshape(zmean, ( ind[0], ind[1] ) )
                Ny, Nx = np.shape(merge)
                #zmean[np.isnan(zmean)] = 0

                try:  #parallel processing with all available cores
                    w = Parallel(n_jobs=-1, verbose=0)(delayed(parallel_me)(
                        Z[k], dissim, correl, contrast, energy, mn)
                                                       for k in range(len(Z)))
                except:  #fall back to serial
                    w = Parallel(n_jobs=1, verbose=0)(delayed(parallel_me)(
                        Z[k], dissim, correl, contrast, energy, mn)
                                                      for k in range(len(Z)))

                zmean = np.reshape(w, (ind[0], ind[1]))
                del w
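                # zmean holds one texture-based score per (win x win) window, arranged
                # on the window grid returned by sliding_window; it is resized back to
                # the full scan below and thresholded at 0.5 to build the shadow mask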

                M = humutils.im_resize(zmean, Nx, Ny)
                M[mask == 0] = 0
                del zmean

                bw = M > 0.5
                del M

                # erode and dilate to remove splotches of no data
                bw2 = binary_dilation(binary_erosion(bw,
                                                     structure=np.ones(
                                                         (3, 3))),
                                      structure=np.ones((13, 13)))
                #bw2 = binary_dilation(binary_erosion(bw,structure=np.ones((win/4,win/4))), structure=np.ones((win/4,win/4)))
                ##bw2 = binary_erosion(bw,structure=np.ones((win*2,win*2)))

                ## fill holes
                bw2 = binary_fill_holes(bw2, structure=np.ones(
                    (win, win))).astype(int)
                merge2 = grey_erosion(merge, structure=np.ones((win, win)))

                #del bw
                #bw2 = np.asarray(bw2!=0,'int8') # we only need 8 bit precision

                bw2 = np.asarray(bw != 0,
                                 'int8')  # we only need 8 bit precision
                del bw

                merge[bw2 == 1] = 0  #blank out bad data
                merge[merge2 == np.min(merge2)] = 0  #blank out bad data
                del merge2

                ## do plots of merged scans
                if doplot == 1:

                    Zdist = dist_m[shape_port[-1] * p:shape_port[-1] * (p + 1)]

                    fig = plt.figure()
                    plt.imshow(merge,
                               cmap='gray',
                               extent=[
                                   min(Zdist),
                                   max(Zdist), -extent * (1 / ft),
                                   extent * (1 / ft)
                               ])
                    plt.ylabel('Range (m)'), plt.xlabel(
                        'Distance along track (m)')

                    plt.axis('normal')
                    plt.axis('tight')
                    custom_save(sonpath,
                                'merge_corrected_rmshadow_scan' + str(p))
                    del fig

                Zp.append(np.flipud(merge[:shape_port[1], :]))
                Zs.append(merge[shape_port[1]:, :])
                del merge, bw2

        else:

            merge = np.vstack((np.flipud(port_fp), star_fp))
            merge = np.asarray(merge, 'float64')

            merge_mask = np.vstack((np.flipud(port_fp), star_fp))

            merge[merge_mask == 0] = 0
            del merge_mask

            mask = np.asarray(merge != 0, 'int8')  # only 8bit precision needed

            merge[np.isnan(merge)] = 0

            #Z,ind = humutils.sliding_window(merge,(win,win),(win/2,win/2))
            Z, ind = humutils.sliding_window(merge, (win, win), (win, win))

            #zmean = np.reshape(zmean, ( ind[0], ind[1] ) )
            Ny, Nx = np.shape(merge)
            #zmean[np.isnan(zmean)] = 0

            try:  #parallel processing with all available cores
                w = Parallel(n_jobs=-1, verbose=0)(delayed(parallel_me)(
                    Z[k], dissim, correl, contrast, energy, mn)
                                                   for k in range(len(Z)))
            except:  #fall back to serial
                w = Parallel(n_jobs=1, verbose=0)(delayed(parallel_me)(
                    Z[k], dissim, correl, contrast, energy, mn)
                                                  for k in range(len(Z)))

            zmean = np.reshape(w, (ind[0], ind[1]))
            del w

            M = humutils.im_resize(zmean, Nx, Ny)
            M[mask == 0] = 0
            del zmean

            bw = M > 0.5
            del M

            # erode and dilate to remove splotches of no data
            bw2 = binary_dilation(binary_erosion(bw, structure=np.ones(
                (3, 3))),
                                  structure=np.ones((13, 13)))
            #bw2 = binary_dilation(binary_erosion(bw,structure=np.ones((win/4,win/4))), structure=np.ones((win/4,win/4)))
            ##bw2 = binary_erosion(bw,structure=np.ones((win*2,win*2)))

            ## fill holes
            bw2 = binary_fill_holes(bw2, structure=np.ones(
                (win, win))).astype(int)
            merge2 = grey_erosion(merge, structure=np.ones((win, win)))

            #del bw
            #bw2 = np.asarray(bw2!=0,'int8') # we only need 8 bit precision

            bw2 = np.asarray(bw != 0, 'int8')  # we only need 8 bit precision
            del bw

            merge[bw2 == 1] = 0  #blank out bad data
            merge[merge2 == np.min(merge2)] = 0  #blank out bad data
            del merge2

            # erode and dilate to remove splotches of no data
            #bw2 = binary_dilation(binary_erosion(bw,structure=np.ones((3,3))), structure=np.ones((13,13)))
            #bw2 = binary_dilation(binary_erosion(bw,structure=np.ones((win,win))), structure=np.ones((win*2,win*2)))
            #bw2 = binary_erosion(bw,structure=np.ones((win,win)))

            # fill holes
            #bw2 = binary_fill_holes(bw2, structure=np.ones((3,3))).astype(int)
            #del bw
            #bw2 = np.asarray(bw2!=0,'int8') # we only need 8 bit precision

            #merge[bw2==1] = 0 #blank out bad data

            ## do plots of merged scans
            if doplot == 1:

                Zdist = dist_m
                fig = plt.figure()
                plt.imshow(merge,
                           cmap='gray',
                           extent=[
                               min(Zdist),
                               max(Zdist), -extent * (1 / ft),
                               extent * (1 / ft)
                           ])
                plt.ylabel('Range (m)'), plt.xlabel('Distance along track (m)')

                plt.axis('normal')
                plt.axis('tight')
                custom_save(sonpath, 'merge_corrected_rmshadow_scan' + str(0))
                del fig

            Zp.append(np.flipud(merge[:shape_port[0], :]))
            Zs.append(merge[shape_port[0]:, :])
            del merge, bw2

        Zp = np.squeeze(Zp)
        Zs = np.squeeze(Zs)
        # create memory mapped file for Zp
        #fp = np.memmap(sonpath+base+'_data_port_lar.dat', dtype='float32', mode='w+', shape=np.shape(Zp))
        #with open(sonpath+base+'_data_port_lar.dat', 'w+') as f:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=np.shape(Zp))
        fp[:] = Zp[:]
        del fp
        del Zp

        #shutil.move(sonpath+base+'_data_port_lar.dat', sonpath+base+'_data_port_la.dat')
        #shutil.move(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat')), os.path.normpath(os.path.join(sonpath,base+'_data_port_la.dat')))

        # create memory mapped file for Zs
        #fp = np.memmap(sonpath+base+'_data_star_lar.dat', dtype='float32', mode='w+', shape=np.shape(Zs))
        #with open(sonpath+base+'_data_star_lar.dat', 'w+') as f:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat')),
                'w+') as ff:
            fp = np.memmap(ff, dtype='float32', mode='w+', shape=np.shape(Zs))
        fp[:] = Zs[:]
        del fp
        del Zs

        #shutil.move(sonpath+base+'_data_star_lar.dat', sonpath+base+'_data_star_la.dat')
        #shutil.move(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat')), os.path.normpath(os.path.join(sonpath,base+'_data_star_la.dat')))

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Code example #7
File: _pyhum_read.py Project: jacurtis-usgs/PyHum
def read(humfile, sonpath, cs2cs_args, c, draft, doplot, t, bedpick, flip_lr,
         model, calc_bearing, filt_bearing, chunk):  #cog = 1,
    '''
    Read a .DAT and associated set of .SON files recorded by a Humminbird(R)
    instrument.

    Parse the data into a set of memory mapped files that will
    subsequently be used by the other functions of the PyHum module.

    Export time-series data and metadata in other formats.

    Create a kml file for visualising boat track

    Syntax
    ----------
    [] = PyHum.read(humfile, sonpath, cs2cs_args, c, draft, doplot, t, bedpick, flip_lr, model, calc_bearing, filt_bearing, chunk)

    Parameters
    ------------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    c : float, *optional* [Default=1450.0]
       speed of sound in water (m/s). Defaults to a value of freshwater
    draft : float, *optional* [Default=0.3]
       draft from water surface to transducer face (m)
    doplot : float, *optional* [Default=1]
       if 1, plots will be made
    t : float, *optional* [Default=0.108]
       length of transducer array (m).
       Default value is that of the 998 series Humminbird(R)
    bedpick : int, *optional* [Default=1]
       if 1, bedpicking will be carried out automatically
       if 0, user will be prompted to pick the bed location on screen
    flip_lr : int, *optional* [Default=0]
       if 1, port and starboard scans will be flipped
       (for situations where the transducer is flipped 180 degrees)
    model: int, *optional* [Default=998]
       A 3 or 4 digit code indicating the model number
       Examples: 998, 997, 1198, 1199
       The strings 'onix', 'helix' and 'mega' are also accepted
    calc_bearing : float, *optional* [Default=0]
       if 1, bearing will be calculated from coordinates
    filt_bearing : float, *optional* [Default=0]
       if 1, bearing will be filtered
    chunk : str, *optional* [Default='d100' (distance, 100 m)]
       a letter followed by a number.
       The letter options are:
       'd' - parse chunks based on distance; the number is the distance in m
       'p' - parse chunks based on number of pings; the number is the number of pings
       'h' - parse chunks based on change in heading; the number is the change in heading in degrees
       '1' - process just 1 chunk

    Returns
    ---------
    sonpath+base+'_data_port.dat': memory-mapped file
        contains the raw echogram from the port side
        sidescan sonar (where present)

    sonpath+base+'_data_star.dat': memory-mapped file
        contains the raw echogram from the starboard side
        sidescan sonar (where present)

    sonpath+base+'_data_dwnhi.dat': memory-mapped file
        contains the raw echogram from the high-frequency
        echosounder (where present)

    sonpath+base+'_data_dwnlow.dat': memory-mapped file
        contains the raw echogram from the low-frequency
        echosounder (where present)

    sonpath+base+"trackline.kml": google-earth kml file
        contains the trackline of the vessel during data
        acquisition

    sonpath+base+'rawdat.csv': comma separated value file
        contains time-series data. Columns correspond to:
        longitude
        latitude
        easting (m)
        northing (m)
        depth to bed (m)
        alongtrack cumulative distance (m)
        instrument heading (deg.)
        vessel heading (deg.)

    sonpath+base+'meta.mat': .mat file
        matlab format file containing a dictionary object
        holding metadata information. Fields are:
        e : ndarray, easting (m)
        n : ndarray, northing (m)
        es : ndarray, low-pass filtered easting (m)
        ns : ndarray, low-pass filtered northing (m)
        lat : ndarray, latitude
        lon : ndarray, longitude
        shape_port : tuple, shape of port scans in memory mapped file
        shape_star : tuple, shape of starboard scans in memory mapped file
        shape_hi : tuple, shape of high-freq. scans in memory mapped file
        shape_low : tuple, shape of low-freq. scans in memory mapped file
        dep_m : ndarray, depth to bed (m)
        dist_m : ndarray, distance along track (m)
        heading : ndarray, heading of vessel (deg. N)
        pix_m: float, size of 1 pixel in across-track dimension (m)
        bed : ndarray, depth to bed (m)
        c : float, speed of sound in water (m/s)
        t : length of sidescan transducer array (m)
        spd : ndarray, vessel speed (m/s)
        time_s : ndarray, time elapsed (s)
        caltime : ndarray, unix epoch time (s)
    '''
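
    # Usage sketch (not part of the original source): a typical call using the
    # documented defaults; the .DAT file and .SON directory names are hypothetical.
    #
    #   PyHum.read('test.DAT', 'test_data', cs2cs_args='epsg:26949', c=1450.0,
    #              draft=0.3, doplot=1, t=0.108, bedpick=1, flip_lr=0,
    #              model=998, calc_bearing=0, filt_bearing=0, chunk='d100')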

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Son files are in %s' % (sonpath))

    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))

    if draft:
        draft = float(draft)
        print('Draft: %s' % (str(draft)))

    if c:
        c = float(c)
        print('Celerity of sound: %s m/s' % (str(c)))

    if doplot:
        doplot = int(doplot)
        if doplot == 0:
            print("Plots will not be made")

    if flip_lr:
        flip_lr = int(flip_lr)
        if flip_lr == 1:
            print("Port and starboard will be flipped")

    if t:
        t = np.asarray(t, float)
        print('Transducer length is %s m' % (str(t)))

    if bedpick:
        bedpick = np.asarray(bedpick, int)
        if bedpick == 1:
            print('Bed picking is auto')
        elif bedpick == 0:
            print('Bed picking is manual')
        else:
            print('User will be prompted per chunk about bed picking method')

    if chunk:
        chunk = str(chunk)
        if chunk[0] == 'd':
            chunkmode = 1
            chunkval = int(chunk[1:])
            print('Chunks based on distance of %s m' % (str(chunkval)))
        elif chunk[0] == 'p':
            chunkmode = 2
            chunkval = int(chunk[1:])
            print('Chunks based on %s pings' % (str(chunkval)))
        elif chunk[0] == 'h':
            chunkmode = 3
            chunkval = int(chunk[1:])
            print('Chunks based on heading deviation of %s degrees' %
                  (str(chunkval)))
        elif chunk[0] == '1':
            chunkmode = 4
            chunkval = 1
            print('Only 1 chunk will be produced')
        else:
            print(
                "Chunk mode not understood - should be 'd', 'p', or 'h' - using defaults"
            )
            chunkmode = 1
            chunkval = 100
            print('Chunks based on distance of %s m' % (str(chunkval)))
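
    # Worked examples of the chunk parsing above (illustrative only):
    #   chunk='d100'  -> chunkmode=1, chunkval=100  (100 m distance chunks)
    #   chunk='p1000' -> chunkmode=2, chunkval=1000 (1000-ping chunks)
    #   chunk='h20'   -> chunkmode=3, chunkval=20   (20 degree heading-change chunks)
    #   chunk='1'     -> chunkmode=4, chunkval=1    (a single chunk)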

    if model:
        try:
            model = int(model)
            print("Data is from the %s series" % (str(model)))
        except:
            if model == 'onix':
                model = 0
                print("Data is from the ONIX series")
            elif model == 'helix':
                model = 1
                print("Data is from the HELIX series")
            elif model == 'mega':
                model = 2
                print("Data is from the MEGA series")
#    if cog:
#       cog = int(cog)
#       if cog==1:
#          print "Heading based on course-over-ground"

    if calc_bearing:
        calc_bearing = int(calc_bearing)
        if calc_bearing == 1:
            print("Bearing will be calculated from coordinates")

    if filt_bearing:
        filt_bearing = int(filt_bearing)
        if filt_bearing == 1:
            print("Bearing will be filtered")

    ## for debugging
    #humfile = r"test.DAT"; sonpath = "test_data"
    #cs2cs_args = "epsg:26949"; doplot = 1; draft = 0
    #c=1450; bedpick=1; fliplr=1; chunk = 'd100'
    #model=998; cog=1; calc_bearing=0; filt_bearing=0

    #if model==2:
    #   f = 1000
    #else:
    f = 455

    try:
        print(
            "Checking the epsg code you have chosen for compatibility with Basemap ... "
        )
        from mpl_toolkits.basemap import Basemap
        m = Basemap(projection='merc',
                    epsg=cs2cs_args.split(':')[1],
                    resolution='i',
                    llcrnrlon=10,
                    llcrnrlat=10,
                    urcrnrlon=30,
                    urcrnrlat=30)
        del m
        print("... epsg code compatible")
    except (ValueError):
        print(
            "Error: the epsg code you have chosen is not compatible with Basemap"
        )
        print(
            "please choose a different epsg code (http://spatialreference.org/)"
        )
        print("program will now close")
        sys.exit()

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    # get the SON files from this directory
    sonfiles = glob.glob(sonpath + '*.SON')
    if not sonfiles:
        sonfiles = glob.glob(os.getcwd() + os.sep + sonpath + '*.SON')

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)
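
    # For example (hypothetical filename): humfile='/some/dir/R00034.DAT' gives
    # base='R00034'; strip_base additionally removes any underscores, negative
    # signs and spaces from the name.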

    print("WARNING: Because files have to be read in byte by byte,")
    print("this could take a very long time ...")

    #reading each sonfile in parallel should be faster ...
    try:
        o = Parallel(n_jobs=np.min([len(sonfiles), cpu_count()]), verbose=0)(
            delayed(getscans)(sonfiles[k], humfile, c, model, cs2cs_args)
            for k in range(len(sonfiles)))
        X, Y, A, B = zip(*o)

        for k in range(len(Y)):
            if Y[k] == 'sidescan_port':
                dat = A[k]  #data.gethumdat()
                metadat = B[k]  #data.getmetadata()
                if flip_lr == 0:
                    data_port = X[k].astype('int16')
                else:
                    data_star = X[k].astype('int16')

            elif Y[k] == 'sidescan_starboard':
                if flip_lr == 0:
                    data_star = X[k].astype('int16')
                else:
                    data_port = X[k].astype('int16')

            elif Y[k] == 'down_lowfreq':
                data_dwnlow = X[k].astype('int16')

            elif Y[k] == 'down_highfreq':
                data_dwnhi = X[k].astype('int16')

            elif Y[k] == 'down_vhighfreq':  #hopefully this only applies to mega systems
                data_dwnhi = X[k].astype('int16')

        del X, Y, A, B, o
        old_pyread = 0

        if 'data_port' not in locals():
            data_port = ''
            print("portside scan not available")

        if 'data_star' not in locals():
            data_star = ''
            print("starboardside scan not available")

        if 'data_dwnhi' not in locals():
            data_dwnhi = ''
            print("high-freq. downward scan not available")

        if 'data_dwnlow' not in locals():
            data_dwnlow = ''
            print("low-frq. downward scan not available")

    except:  # revert to the older pyread version if the parallelised version fails

        print(
            "something went wrong with the parallelised version of pyread ...")

        try:
            import pyread
        except:
            from . import pyread

        data = pyread.pyread(sonfiles, humfile, c, model, cs2cs_args)

        dat = data.gethumdat()

        metadat = data.getmetadata()

        old_pyread = 1

    nrec = len(metadat['n'])

    metadat['instr_heading'] = metadat['heading'][:nrec]

    #metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing, cog, metadat['lat'], metadat['lon'], metadat['instr_heading'])

    try:
        es = humutils.runningMeanFast(metadat['e'][:nrec],
                                      len(metadat['e'][:nrec]) / 100)
        ns = humutils.runningMeanFast(metadat['n'][:nrec],
                                      len(metadat['n'][:nrec]) / 100)
    except:
        es = metadat['e'][:nrec]
        ns = metadat['n'][:nrec]

    metadat['es'] = es
    metadat['ns'] = ns

    try:
        trans = pyproj.Proj(init=cs2cs_args)
    except:
        trans = pyproj.Proj(cs2cs_args.lstrip(), inverse=True)

    lon, lat = trans(es, ns, inverse=True)
    metadat['lon'] = lon
    metadat['lat'] = lat

    metadat['heading'] = humutils.get_bearing(calc_bearing, filt_bearing,
                                              metadat['lat'], metadat['lon'],
                                              metadat['instr_heading'])  #cog

    dist_m = humutils.get_dist(lat, lon)
    metadat['dist_m'] = dist_m

    if calc_bearing == 1:  # recalculate speed, m/s
        ds = np.gradient(np.squeeze(metadat['time_s']))
        dx = np.gradient(np.squeeze(metadat['dist_m']))
        metadat['spd'] = dx[:nrec] / ds[:nrec]

    # theta at 3dB in the horizontal
    theta3dB = np.arcsin(c / (t * (f * 1000)))
    #resolution of 1 sidescan pixel to nadir
    ft = (np.pi / 2) * (1 / theta3dB)  #/ (f/455)
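
    # Worked example (illustrative, using the documented defaults c=1450 m/s,
    # t=0.108 m and f=455 kHz):
    #   theta3dB = arcsin(1450 / (0.108 * 455000)) ~= 0.0295 rad
    #   ft = (pi/2) / 0.0295 ~= 53 pixels per metre across track,
    #   so 1/ft ~= 0.019 m per pixel (stored below as metadat['pix_m'])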

    dep_m = humutils.get_depth(metadat['dep_m'][:nrec])

    if old_pyread == 1:  #older pyread version

        # port scan
        try:
            if flip_lr == 0:
                data_port = data.getportscans().astype('int16')
            else:
                data_port = data.getstarscans().astype('int16')
        except:
            data_port = ''
            print("portside scan not available")

    if data_port != '':

        Zt, ind_port = makechunks_scan(chunkmode, chunkval, metadat, data_port,
                                       0)
        del data_port

        ## create memory mapped file for Z
        shape_port = io.set_mmap_data(sonpath, base, '_data_port.dat', 'int16',
                                      Zt)

        ##we are only going to access the portion of memory required
        port_fp = io.get_mmap_data(sonpath, base, '_data_port.dat', 'int16',
                                   shape_port)

    if old_pyread == 1:  #older pyread version
        # starboard scan
        try:
            if flip_lr == 0:
                data_star = data.getstarscans().astype('int16')
            else:
                data_star = data.getportscans().astype('int16')
        except:
            data_star = ''
            print("starboardside scan not available")

    if data_star != '':

        Zt, ind_star = makechunks_scan(chunkmode, chunkval, metadat, data_star,
                                       1)
        del data_star

        # create memory mapped file for Z
        shape_star = io.set_mmap_data(sonpath, base, '_data_star.dat', 'int16',
                                      Zt)

        star_fp = io.get_mmap_data(sonpath, base, '_data_star.dat', 'int16',
                                   shape_star)

    if 'star_fp' in locals() and 'port_fp' in locals():
        # check that port and starboard are same size
        # and trim if not
        if np.shape(star_fp) != np.shape(port_fp):
            print(
                "port and starboard scans are different sizes ... rectifying")
            if np.shape(port_fp[0])[1] > np.shape(star_fp[0])[1]:
                tmp = port_fp.copy()
                tmp2 = np.empty_like(star_fp)
                for k in range(len(tmp)):
                    tmp2[k] = tmp[k][:, :np.shape(star_fp[k])[1]]
                del tmp

                # create memory mapped file for Z
                shape_port = io.set_mmap_data(sonpath, base, '_data_port2.dat',
                                              'int16', tmp2)
                #shape_star = shape_port.copy()
                shape_star = tuple(np.asarray(shape_port).copy())

                ##we are only going to access the portion of memory required
                port_fp = io.get_mmap_data(sonpath, base, '_data_port2.dat',
                                           'int16', shape_port)

                ind_port = list(ind_port)
                ind_port[-1] = np.shape(star_fp[0])[1]
                ind_port = tuple(ind_port)

            elif np.shape(port_fp[0])[1] < np.shape(star_fp[0])[1]:
                tmp = star_fp.copy()
                tmp2 = np.empty_like(port_fp)
                for k in range(len(tmp)):
                    tmp2[k] = tmp[k][:, :np.shape(port_fp[k])[1]]
                del tmp

                # create memory mapped file for Z
                shape_port = io.set_mmap_data(sonpath, base, '_data_star2.dat',
                                              'int16', tmp2)
                #shape_star = shape_port.copy()
                shape_star = tuple(np.asarray(shape_port).copy())

                #we are only going to access the portion of memory required
                star_fp = io.get_mmap_data(sonpath, base, '_data_star2.dat',
                                           'int16', shape_star)

                ind_star = list(ind_star)
                ind_star[-1] = np.shape(port_fp[0])[1]
                ind_star = tuple(ind_star)

    if old_pyread == 1:  #older pyread version
        # low-freq. sonar
        try:
            data_dwnlow = data.getlowscans().astype('int16')
        except:
            data_dwnlow = ''
            print("low-freq. scan not available")

    if data_dwnlow != '':

        Zt, ind_low = makechunks_scan(chunkmode, chunkval, metadat,
                                      data_dwnlow, 2)
        del data_dwnlow

        # create memory mapped file for Z
        shape_low = io.set_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                     'int16', Zt)

        ##we are only going to access the portion of memory required
        dwnlow_fp = io.get_mmap_data(sonpath, base, '_data_dwnlow.dat',
                                     'int16', shape_low)

    if old_pyread == 1:  #older pyread version
        # hi-freq. sonar
        try:
            data_dwnhi = data.gethiscans().astype('int16')
        except:
            data_dwnhi = ''
            print("high-freq. scan not available")

    if data_dwnhi != '':

        Zt, ind_hi = makechunks_scan(chunkmode, chunkval, metadat, data_dwnhi,
                                     3)
        del data_dwnhi

        # create memory mapped file for Z
        shape_hi = io.set_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16',
                                    Zt)

        dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi.dat', 'int16',
                                    shape_hi)

    if 'dwnhi_fp' in locals() and 'dwnlow_fp' in locals():
        # check that low and high are same size
        # and trim if not
        if (np.shape(dwnhi_fp) != np.shape(dwnlow_fp)) and (chunkmode != 4):
            print("dwnhi and dwnlow are different sizes ... rectifying")
            if np.shape(dwnhi_fp[0])[1] > np.shape(dwnlow_fp[0])[1]:
                tmp = dwnhi_fp.copy()
                tmp2 = np.empty_like(dwnlow_fp)
                for k in range(len(tmp)):
                    tmp2[k] = tmp[k][:, :np.shape(dwnlow_fp[k])[1]]
                del tmp

                # create memory mapped file for Z
                shape_low = io.set_mmap_data(sonpath, base, '_data_dwnhi2.dat',
                                             'int16', tmp2)
                #shape_hi = shape_low.copy()
                shape_hi = tuple(np.asarray(shape_low).copy())

                ##we are only going to access the portion of memory required
                dwnhi_fp = io.get_mmap_data(sonpath, base, '_data_dwnhi2.dat',
                                            'int16', shape_hi)

                ind_hi = list(ind_hi)
                ind_hi[-1] = np.shape(dwnlow_fp[0])[1]
                ind_hi = tuple(ind_hi)

            elif np.shape(dwnhi_fp[0])[1] < np.shape(dwnlow_fp[0])[1]:
                tmp = dwnlow_fp.copy()
                tmp2 = np.empty_like(dwnhi_fp)
                for k in range(len(tmp)):
                    tmp2[k] = tmp[k][:, :np.shape(dwnhi_fp[k])[1]]
                del tmp

                # create memory mapped file for Z
                shape_low = io.set_mmap_data(sonpath, base,
                                             '_data_dwnlow2.dat', 'int16',
                                             tmp2)
                #shape_hi = shape_low.copy()
                shape_hi = tuple(np.asarray(shape_low).copy())

                ##we are only going to access the portion of memory required
                dwnlow_fp = io.get_mmap_data(sonpath, base,
                                             '_data_dwnlow2.dat', 'int16',
                                             shape_low)

                ind_low = list(ind_low)
                ind_low[-1] = np.shape(dwnhi_fp[0])[1]
                ind_low = tuple(ind_low)

    if old_pyread == 1:  #older pyread version
        del data

    if ('shape_port' in locals()) and (chunkmode != 4):
        metadat['shape_port'] = shape_port
        nrec = metadat['shape_port'][0] * metadat['shape_port'][2]
    elif ('shape_port' in locals()) and (chunkmode == 4):
        metadat['shape_port'] = shape_port
        nrec = metadat['shape_port'][1]
    else:
        metadat['shape_port'] = ''

    if ('shape_star' in locals()) and (chunkmode != 4):
        metadat['shape_star'] = shape_star
        nrec = metadat['shape_star'][0] * metadat['shape_star'][2]
    elif ('shape_star' in locals()) and (chunkmode == 4):
        metadat['shape_star'] = shape_star
        nrec = metadat['shape_star'][1]
    else:
        metadat['shape_star'] = ''

    if ('shape_hi' in locals()) and (chunkmode != 4):
        metadat['shape_hi'] = shape_hi
        #nrec = metadat['shape_hi'][0] * metadat['shape_hi'][2] * 2
    elif ('shape_hi' in locals()) and (chunkmode == 4):
        metadat['shape_hi'] = shape_hi
    else:
        metadat['shape_hi'] = ''

    if ('shape_low' in locals()) and (chunkmode != 4):
        metadat['shape_low'] = shape_low
        #nrec = metadat['shape_low'][0] * metadat['shape_low'][2] * 2
    elif ('shape_low' in locals()) and (chunkmode == 4):
        metadat['shape_low'] = shape_low
    else:
        metadat['shape_low'] = ''

    #make kml boat trackline
    humutils.make_trackline(lon, lat, sonpath, base)

    if 'port_fp' in locals() and 'star_fp' in locals():

        #if not os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'meta.mat'))):
        if 2 > 1:
            if bedpick == 1:  # auto

                x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp,
                                               c)

                if len(dist_m) < len(bed):
                    dist_m = np.append(
                        dist_m, dist_m[-1] * np.ones(len(bed) - len(dist_m)))

                if doplot == 1:
                    if chunkmode != 4:
                        for k in range(len(star_fp)):
                            plot_2bedpicks(
                                port_fp[k], star_fp[k],
                                bed[ind_port[-1] * k:ind_port[-1] * (k + 1)],
                                dist_m[ind_port[-1] * k:ind_port[-1] *
                                       (k + 1)],
                                x[ind_port[-1] * k:ind_port[-1] * (k + 1)], ft,
                                shape_port, sonpath, k, chunkmode)
                    else:
                        plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft,
                                       shape_port, sonpath, 0, chunkmode)

                # 'real' bed is estimated to be the minimum of the two
                bed = np.min(np.vstack((bed[:nrec], np.squeeze(x[:nrec]))),
                             axis=0)
                bed = humutils.runningMeanFast(bed, 3)

            elif bedpick > 1:  # user prompt

                x, bed = humutils.auto_bedpick(ft, dep_m, chunkmode, port_fp,
                                               c)

                if len(dist_m) < len(bed):
                    dist_m = np.append(
                        dist_m, dist_m[-1] * np.ones(len(bed) - len(dist_m)))

                # 'real' bed is estimated to be the minimum of the two
                bed = np.min(np.vstack((bed[:nrec], np.squeeze(x[:nrec]))),
                             axis=0)
                bed = humutils.runningMeanFast(bed, 3)

                # manually intervene
                fig = plt.figure()
                ax = plt.gca()
                if chunkmode != 4:
                    im = ax.imshow(np.hstack(port_fp),
                                   cmap='gray',
                                   origin='upper')
                else:
                    im = ax.imshow(port_fp, cmap='gray', origin='upper')
                plt.plot(bed, 'r')
                plt.axis('normal')
                plt.axis('tight')

                pts1 = plt.ginput(
                    n=300,
                    timeout=30)  # wait for up to 300 clicks or 30 seconds
                x1 = [pt[0] for pt in pts1]  # x coordinates of the clicked points
                y1 = [pt[1] for pt in pts1]  # y coordinates of the clicked points
                plt.close()
                del fig

                if x1 != []:  # if any points were clicked
                    tree = KDTree(np.c_[np.arange(len(bed)), bed])
                    try:
                        dist, inds = tree.query(np.c_[x1, y1],
                                                k=100,
                                                eps=5,
                                                n_jobs=-1)
                    except:
                        dist, inds = tree.query(np.c_[x1, y1], k=100, eps=5)

                    b = np.interp(inds, x1, y1)
                    bed2 = bed.copy()
                    bed2[inds] = b
                    bed = bed2

                if doplot == 1:
                    if chunkmode != 4:
                        for k in range(len(star_fp)):
                            plot_2bedpicks(
                                port_fp[k], star_fp[k],
                                bed[ind_port[-1] * k:ind_port[-1] * (k + 1)],
                                dist_m[ind_port[-1] * k:ind_port[-1] *
                                       (k + 1)],
                                x[ind_port[-1] * k:ind_port[-1] * (k + 1)], ft,
                                shape_port, sonpath, k, chunkmode)
                    else:
                        plot_2bedpicks(port_fp, star_fp, bed, dist_m, x, ft,
                                       shape_port, sonpath, 0, chunkmode)

            else:  #manual

                beds = []

                if chunkmode != 4:
                    for k in range(len(port_fp)):
                        raw_input(
                            "Bed picking " + str(k + 1) + " of " +
                            str(len(port_fp)) +
                            ", are you ready? 30 seconds. Press Enter to continue..."
                        )
                        bed = {}
                        fig = plt.figure()
                        ax = plt.gca()
                        im = ax.imshow(port_fp[k], cmap='gray', origin='upper')
                        pts1 = plt.ginput(
                            n=300, timeout=30
                        )  # wait for up to 300 clicks or 30 seconds
                        x1 = [pt[0] for pt in pts1]  # x coordinates of the clicked points
                        y1 = [pt[1] for pt in pts1]  # y coordinates of the clicked points
                        bed = np.interp(np.r_[:ind_port[-1]], x1, y1)
                        plt.close()
                        del fig
                        beds.append(bed)
                        extent = np.shape(port_fp[k])[0]
                    bed = np.asarray(np.hstack(beds), 'float')
                else:
                    raw_input(
                        "Bed picking - are you ready? 30 seconds. Press Enter to continue..."
                    )
                    bed = {}
                    fig = plt.figure()
                    ax = plt.gca()
                    im = ax.imshow(port_fp, cmap='gray', origin='upper')
                    pts1 = plt.ginput(
                        n=300, timeout=30
                    )  # wait for up to 300 clicks or 30 seconds
                    x1 = [pt[0] for pt in pts1]  # x coordinates of the clicked points
                    y1 = [pt[1] for pt in pts1]  # y coordinates of the clicked points
                    bed = np.interp(np.r_[:ind_port[-1]], x1, y1)
                    plt.close()
                    del fig
                    beds.append(bed)
                    extent = np.shape(port_fp)[1]
                    bed = np.asarray(np.hstack(beds), 'float')

            # now revise the depth in metres
            dep_m = (1 / ft) * bed

            if doplot == 1:
                if chunkmode != 4:
                    for k in range(len(star_fp)):
                        plot_bedpick(
                            port_fp[k], star_fp[k], (1 / ft) *
                            bed[ind_port[-1] * k:ind_port[-1] * (k + 1)],
                            dist_m[ind_port[-1] * k:ind_port[-1] * (k + 1)],
                            ft, shape_port, sonpath, k, chunkmode)
                else:
                    plot_bedpick(port_fp, star_fp, (1 / ft) * bed, dist_m, ft,
                                 shape_port, sonpath, 0, chunkmode)

            metadat['bed'] = bed[:nrec]

    else:
        metadat['bed'] = dep_m[:nrec] * ft

    metadat['heading'] = metadat['heading'][:nrec]
    metadat['lon'] = lon[:nrec]
    metadat['lat'] = lat[:nrec]
    metadat['dist_m'] = dist_m[:nrec]
    metadat['dep_m'] = dep_m[:nrec]
    metadat['pix_m'] = 1 / ft
    metadat['bed'] = metadat['bed'][:nrec]
    metadat['c'] = c
    metadat['t'] = t
    if model == 2:
        metadat['f'] = f * 2
    else:
        metadat['f'] = f

    metadat['spd'] = metadat['spd'][:nrec]
    metadat['time_s'] = metadat['time_s'][:nrec]
    metadat['e'] = metadat['e'][:nrec]
    metadat['n'] = metadat['n'][:nrec]
    metadat['es'] = metadat['es'][:nrec]
    metadat['ns'] = metadat['ns'][:nrec]
    try:
        metadat['caltime'] = metadat['caltime'][:nrec]
    except:
        metadat['caltime'] = metadat['caltime']

    savemat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')),
            metadat,
            oned_as='row')

    f = open(os.path.normpath(os.path.join(sonpath, base + 'rawdat.csv')),
             'wt')
    writer = csv.writer(f)
    writer.writerow(
        ('longitude', 'latitude', 'easting', 'northing', 'depth (m)',
         'distance (m)', 'instr. heading (deg)', 'heading (deg.)'))
    for i in range(0, nrec):
        writer.writerow(
            (float(lon[i]), float(lat[i]), float(es[i]), float(ns[i]),
             float(dep_m[i]), float(dist_m[i]),
             float(metadat['instr_heading'][i]), float(metadat['heading'][i])))
    f.close()

    del lat, lon, dep_m  #, dist_m

    if doplot == 1:

        plot_pos(sonpath, metadat, es, ns)

        if 'dwnlow_fp' in locals():

            plot_dwnlow(dwnlow_fp, chunkmode, sonpath)

        if 'dwnhi_fp' in locals():

            plot_dwnhi(dwnhi_fp, chunkmode, sonpath)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")
Code example #8
def mosaic_texture(humfile, sonpath, cs2cs_args = "epsg:26949", res = 99, nn = 5, weight = 1):
         
    '''
    Create mosaics of the spatially referenced sidescan echograms

    Syntax
    ----------
    [] = PyHum.mosaic_texture(humfile, sonpath, cs2cs_args, res, nn, weight)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    res : float, *optional* [Default=99]
       grid resolution of output gridded texture map
       if res=99, res will be determined automatically from the spatial resolution of 1 pixel
    nn: int, *optional* [Default=5]
       number of nearest neighbours for gridding
    weight: int, *optional* [Default=1]
       specifies the type of pixel weighting in the gridding process
       weight = 1, based on grazing angle and inverse distance weighting
       weight = 2, based on grazing angle only
       weight = 3, inverse distance weighting only
       weight = 4, no weighting
    
    Returns
    -------

    sonpath+'class_GroundOverlay.kmz': kmz file
        contains the gridded sidescan intensity mosaic for importing into google earth

    sonpath+'class_overlay1.png' : png image file
        image overlay associated with the kmz file

    sonpath+'class_map_imagery.png' : png image file
        gridded sidescan intensity mosaic overlain onto an image pulled from an ESRI image server

    '''
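
    # Usage sketch (not part of the original source); the .DAT file and .SON
    # directory names below are hypothetical:
    #
    #   PyHum.mosaic_texture('test.DAT', 'test_data', cs2cs_args='epsg:26949',
    #                        res=99, nn=5, weight=1)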

    # prompt user to supply file if no input file given
    if not humfile:
       print('An input file is required!!!!!!')
       Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
       humfile = askopenfilename(filetypes=[("DAT files","*.DAT")]) 

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
       print('A *.SON directory is required!!!!!!')
       Tk().withdraw() # we don't want a full GUI, so keep the root window from appearing
       sonpath = askdirectory() 

    # print given arguments to screen and convert data type where necessary
    if humfile:
       print('Input file is %s' % (humfile))

    if sonpath:
       print('Sonar file path is %s' % (sonpath))

    if cs2cs_args:
       print('cs2cs arguments are %s' % (cs2cs_args))  

    if res:
       res = np.asarray(res,float)
       print('Gridding resolution: %s' % (str(res)))   
       
    if nn:
       nn = int(nn)
       print('Number of nearest neighbours for gridding: %s' % (str(nn)))
                    
    if weight:
       weight = int(weight)
       print('Weighting for gridding: %s' % (str(weight)))               


    ##nn = 5 #number of nearest neighbours in gridding
    noisefloor = 10 # noise threshold in dB W (needed below to filter out noise pixels)

    # start timer
    if os.name=='posix': # true if linux/mac or cygwin on windows
       start = time.time()
    else: # windows
       start = time.clock()

    trans =  pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1]!=os.sep:
       sonpath = sonpath + os.sep

    base = humfile.split('.DAT') # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath,base+'meta.mat')))

    esi = np.squeeze(meta['e'])
    nsi = np.squeeze(meta['n']) 
    
    theta = np.squeeze(meta['heading'])/(180/np.pi)

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port!='':
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_port_lar.dat'))):
          port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat', 'float32', tuple(shape_port))
       else:
          port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat', 'float32', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star!='':
       if os.path.isfile(os.path.normpath(os.path.join(sonpath,base+'_data_star_lar.dat'))):
             star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat', 'float32', tuple(shape_star))
       else:
          star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat', 'float32', tuple(shape_star))

    # time varying gain
    tvg = ((8.5*10**-5)+(3/76923)+((8.5*10**-5)/4))*meta['c']
        
    # depth correction
    dist_tvg = np.squeeze(((np.tan(np.radians(25)))*np.squeeze(meta['dep_m']))-(tvg))
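
    # Worked example (illustrative): with the documented default c = 1450 m/s,
    # tvg = ((8.5e-5) + (3/76923) + (8.5e-5)/4) * 1450 ~= 0.21 m, so
    # dist_tvg ~= tan(25 deg) * dep_m - 0.21 ~= 0.47 * dep_m - 0.21 (metres)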

    # read in range data
    R_fp = io.get_mmap_data(sonpath, base, '_data_range.dat', 'float32', tuple(shape_star))

    dx = np.arcsin(meta['c']/(1000*meta['t']*meta['f']))
    pix_m = meta['pix_m']
    c = meta['c']

    if not os.path.isfile( os.path.normpath(os.path.join(sonpath,base+"S.p")) ):
    #if 2 > 1:
       inputfiles = []
       if len(shape_star)>2:    
          for p in range(len(star_fp)):
             e = esi[shape_port[-1]*p:shape_port[-1]*(p+1)]
             n = nsi[shape_port[-1]*p:shape_port[-1]*(p+1)]
             t = theta[shape_port[-1]*p:shape_port[-1]*(p+1)]
             d = dist_tvg[shape_port[-1]*p:shape_port[-1]*(p+1)]
             dat_port = port_fp[p]
             dat_star = star_fp[p]
             data_R = R_fp[p]
             print("writing chunk %s " % (str(p)))
             write_points(e, n, t, d, dat_port, dat_star, data_R, pix_m, res, cs2cs_args, sonpath, p, c, dx)
             inputfiles.append(os.path.normpath(os.path.join(sonpath,'x_y_class'+str(p)+'.asc')))
       else:
          p=0
          print("writing chunk %s " % (str(p)))
          write_points(esi, nsi, theta, dist_tvg, port_fp, star_fp, R_fp, meta['pix_m'], res, cs2cs_args, sonpath, 0, c, dx)
          inputfiles.append(os.path.normpath(os.path.join(sonpath,'x_y_class'+str(p)+'.asc')))         
          
       #trans =  pyproj.Proj(init=cs2cs_args)

       # D, R, h, t
       print("reading points from %s files" % (str(len(inputfiles))))
       X,Y,S,D,R,h,t,i = getxys(inputfiles)

       print("%s points read from %s files" % (str(len(S)), str(len(inputfiles))))

       # remove values where sidescan intensity is zero
       ind = np.where(np.logical_not(S==0))[0]

       X = X[ind]; Y = Y[ind]
       S = S[ind]; D = D[ind]
       R = R[ind]; h = h[ind]
       t = t[ind]; i = i[ind]
       del ind   
   
       # save to file for temporary storage
       pickle.dump( S, open( os.path.normpath(os.path.join(sonpath,base+"S.p")), "wb" ) ); del S
       pickle.dump( D, open( os.path.normpath(os.path.join(sonpath,base+"D.p")), "wb" ) ); del D
       pickle.dump( t, open( os.path.normpath(os.path.join(sonpath,base+"t.p")), "wb" ) ); del t
       pickle.dump( i, open( os.path.normpath(os.path.join(sonpath,base+"i.p")), "wb" ) ); del i

       pickle.dump( X, open( os.path.normpath(os.path.join(sonpath,base+"X.p")), "wb" ) ); del X
       pickle.dump( Y, open( os.path.normpath(os.path.join(sonpath,base+"Y.p")), "wb" ) ); del Y
       pickle.dump( R, open( os.path.normpath(os.path.join(sonpath,base+"R.p")), "wb" ) ); 
       pickle.dump( h, open( os.path.normpath(os.path.join(sonpath,base+"h.p")), "wb" ) ); 

       #grazing angle
       g = np.arctan2(R.flatten(), h.flatten()) # arctan2(R, h) = angle whose tangent is R/h
       pickle.dump( g, open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "wb" ) ); del g, R, h
   
    print("creating grids ...") 

    if res==0:
       res=99

    if res==99:

       #### prepare grids
       R = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"R.p")), "rb" ) )

       ## actual along-track resolution is this: dx times dy = Af
       tmp = R * dx * (c*0.007 / 2)
       del R

       resg = np.min(tmp[tmp>0])
       del tmp
    else:
       resg = res

    X = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"X.p")), "rb" ) )
    Y = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"Y.p")), "rb" ) )
    
    humlon, humlat = trans(X, Y, inverse=True)

    grid_x, grid_y = np.meshgrid( np.arange(np.min(X), np.max(X), resg), np.arange(np.min(Y), np.max(Y), resg) )    
 
    shape = np.shape(grid_x)

    tree = KDTree(np.c_[X.flatten(), Y.flatten()])
    del X, Y

    print("mosaicking ...")  
    #k nearest neighbour
    try:
       dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()], k = nn, n_jobs=-1)
    except:
       #print ".... update your scipy installation to use faster kd-tree"
       dist, inds = tree.query(np.c_[grid_x.flatten(), grid_y.flatten()], k = nn)
    
    #del grid_x, grid_y
    
    if weight==1:
       g = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "rb" ) )
       w = g[inds] + 1.0 / dist**2
       del g
    elif weight==2:
       g = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "rb" ) )
       w = g[inds]
       del g
    elif weight==3:
       w = 1.0 / dist**2    
    elif weight==4:
       w = 1.0
    
    #g = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"g.p")), "rb" ) )
    #w = g[inds] + 1.0 / dist**2
    #del g

    if weight < 4:
       w[np.isinf(w)]=1
       w[np.isnan(w)]=1
       w[w>10000]=10000
       w[w<=0]=1
    
    # load in sidescan intensity
    S = pickle.load( open( os.path.normpath(os.path.join(sonpath,base+"S.p")), "rb" ) )
    # filter out noise pixels
    S[S<noisefloor] = np.nan
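
    # What follows is a weighted average of sidescan intensity over the nn
    # nearest points: Sdat_g = nansum(w * S[inds], axis=1) / nansum(w, axis=1)
    # (a straight sum when weight=4, and no averaging when nn=1)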

    if nn==1:
       Sdat_g = (w * S.flatten()[inds]).reshape(shape)
       del w
       dist = dist.reshape(shape)
    else:
       if weight < 4:
          Sdat_g = (np.nansum(w * S.flatten()[inds], axis=1) / np.nansum(w, axis=1)).reshape(shape)
       else:
          Sdat_g = (np.nansum(S.flatten()[inds], axis=1)).reshape(shape)
       del w
       dist = np.nanmean(dist,axis=1).reshape(shape)

    del S

    Sdat_g[dist>1] = np.nan
    Sdat_g[Sdat_g<noisefloor] = np.nan

    dat = Sdat_g.copy()
    dat[dist>1] = 0
    dat2 = replace_nans.RN(dat.astype('float64'),1000,0.01,2,'localmean').getdata()
    dat2[dat==0] = np.nan
    del dat

    dat2[dat2<noisefloor] = np.nan

    Sdat_g = dat2.copy()
    del dat2
   
    Sdat_g[Sdat_g==0] = np.nan
    Sdat_g[np.isinf(Sdat_g)] = np.nan
    Sdat_gm = np.ma.masked_invalid(Sdat_g)
    del Sdat_g

    glon, glat = trans(grid_x, grid_y, inverse=True)
    del grid_x, grid_y
    
    # =========================================================
    print("creating kmz file ...")
    ## new way to create kml file  
    pixels = 1024 * 10
 
    fig, ax = humutils.gearth_fig(llcrnrlon=glon.min(),
                     llcrnrlat=glat.min(),
                     urcrnrlon=glon.max(),
                     urcrnrlat=glat.max(),
                     pixels=pixels)
    cs = ax.pcolormesh(glon, glat, Sdat_gm)
    ax.set_axis_off()
    fig.savefig(os.path.normpath(os.path.join(sonpath,'class_overlay1.png')), transparent=True, format='png')    
    

    fig = plt.figure(figsize=(1.0, 4.0), facecolor=None, frameon=False)
    ax = fig.add_axes([0.0, 0.05, 0.2, 0.9])
    cb = fig.colorbar(cs, cax=ax)
    cb.set_label('Texture lengthscale [m]', rotation=-90, color='k', labelpad=20)
    fig.savefig(os.path.normpath(os.path.join(sonpath,'class_legend.png')), transparent=False, format='png')  


    humutils.make_kml(llcrnrlon=glon.min(), llcrnrlat=glat.min(),
         urcrnrlon=glon.max(), urcrnrlat=glat.max(),
         figs=[os.path.normpath(os.path.join(sonpath,'class_overlay1.png'))], 
         colorbar=os.path.normpath(os.path.join(sonpath,'class_legend.png')),
         kmzfile=os.path.normpath(os.path.join(sonpath,'class_GroundOverlay.kmz')), 
         name='Sidescan Intensity')


    # =========================================================
    print("drawing and printing map ...")
    fig = plt.figure(frameon=False)
    map = Basemap(projection='merc', epsg=cs2cs_args.split(':')[1], 
     resolution = 'i', #h #f
     llcrnrlon=np.min(humlon)-0.001, llcrnrlat=np.min(humlat)-0.001,
     urcrnrlon=np.max(humlon)+0.001, urcrnrlat=np.max(humlat)+0.001)

    gx,gy = map.projtran(glon, glat)
       
    try:
       map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='ESRI_Imagery_World_2D', xpixels=1000, ypixels=None, dpi=300)
    except:
       map.arcgisimage(server='http://server.arcgisonline.com/ArcGIS', service='World_Imagery', xpixels=1000, ypixels=None, dpi=300)
    #finally:
    #   print "error: map could not be created..."
      
    ax = plt.Axes(fig, [0., 0., 1., 1.], )
    ax.set_axis_off()
    fig.add_axes(ax)

    if Sdat_gm.size > 25000000:
       print("matrix size > 25,000,000 - decimating by factor of 5 for display")
       map.pcolormesh(gx[::5,::5], gy[::5,::5], Sdat_gm[::5,::5], vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))
    else:
       map.pcolormesh(gx, gy, Sdat_gm, vmin=np.nanmin(Sdat_gm), vmax=np.nanmax(Sdat_gm))

    custom_save2(sonpath,'class_map_imagery')
    del fig 

   
    if os.name=='posix': # true if linux/mac
       elapsed = (time.time() - start)
    else: # windows
       elapsed = (time.clock() - start)
    print("Processing took "+str(elapsed)+"seconds to analyse")

    print("Done!")
Code example #9
def map_texture(humfile, sonpath, cs2cs_args, res, mode, nn,
                numstdevs):  #influence = 10,
    '''
    Create plots of the texture lengthscale maps made in PyHum.texture module 
    using the algorithm detailed by Buscombe et al. (2015)
    This textural lengthscale is not a direct measure of grain size. Rather, it is a statistical 
    representation that integrates over many attributes of bed texture, of which grain size is the most important. 
    The technique is a physically based means to identify regions of texture within a sidescan echogram, 
    and could provide a basis for objective, automated riverbed sediment classification.

    Syntax
    ----------
    [] = PyHum.map_texture(humfile, sonpath, cs2cs_args, res, mode, nn, numstdevs)

    Parameters
    ----------
    humfile : str
       path to the .DAT file
    sonpath : str
       path where the *.SON files are
    cs2cs_args : int, *optional* [Default="epsg:26949"]
       arguments to create coordinates in a projected coordinate system
       this argument gets given to pyproj to turn wgs84 (lat/lon) coordinates
       into any projection supported by the proj.4 libraries
    res : float, *optional* [Default=0.5]
       grid resolution of output gridded texture map
    mode: int, *optional* [Default=3]
       gridding mode. 1 = nearest neighbour
                      2 = inverse weighted nearest neighbour
                      3 = Gaussian weighted nearest neighbour
    nn: int, *optional* [Default=64]
       number of nearest neighbours for gridding (used if mode > 1) 
    numstdevs: int, *optional* [Default = 4]
       Threshold number of standard deviations in texture lengthscale per grid cell up to which to accept 
           
    Returns
    -------
    sonpath+'x_y_class'+str(p)+'.asc' : text file
        contains the point cloud of easting, northing, and texture lengthscales
        of the pth chunk

    sonpath+'class_GroundOverlay'+str(p)+'.kml': kml file
        contains gridded (or point cloud) texture lengthscale map for importing into google earth
        of the pth chunk

    sonpath+'class_map'+str(p)+'.png' : 
        image overlay associated with the kml file

    sonpath+'class_map_imagery'+str(p)+'.png' : png image file
        gridded (or point cloud) texture lengthscale map
        overlain onto an image pulled from esri image server

    References
    ----------
      .. [1] Buscombe, D., Grams, P.E., and Smith, S.M.C., 2015, Automated riverbed sediment
       classification using low-cost sidescan sonar. Journal of Hydraulic Engineering 10.1061/(ASCE)HY.1943-7900.0001079, 06015019.
    '''
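
    # Usage sketch (not part of the original source); the .DAT file and .SON
    # directory names below are hypothetical:
    #
    #   PyHum.map_texture('test.DAT', 'test_data', cs2cs_args='epsg:26949',
    #                     res=0.5, mode=3, nn=64, numstdevs=4)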

    # prompt user to supply file if no input file given
    if not humfile:
        print('An input file is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        humfile = askopenfilename(filetypes=[("DAT files", "*.DAT")])

    # prompt user to supply directory if no input sonpath is given
    if not sonpath:
        print('A *.SON directory is required!!!!!!')
        Tk().withdraw(
        )  # we don't want a full GUI, so keep the root window from appearing
        sonpath = askdirectory()

    # print given arguments to screen and convert data type where necessary
    if humfile:
        print('Input file is %s' % (humfile))

    if sonpath:
        print('Sonar file path is %s' % (sonpath))

    if cs2cs_args:
        print('cs2cs arguments are %s' % (cs2cs_args))

    if res:
        res = np.asarray(res, float)
        print('Gridding resolution: %s' % (str(res)))

    if mode:
        mode = int(mode)
        print('Mode for gridding: %s' % (str(mode)))

    if nn:
        nn = int(nn)
        print('Number of nearest neighbours for gridding: %s' % (str(nn)))

    #if influence:
    #   influence = int(influence)
    #   print 'Radius of influence for gridding: %s (m)' % (str(influence))

    if numstdevs:
        numstdevs = int(numstdevs)
        print(
            'Threshold number of standard deviations in texture lengthscale per grid cell up to which to accept: %s'
            % (str(numstdevs)))

    # start timer
    if os.name == 'posix':  # true if linux/mac or cygwin on windows
        start = time.time()
    else:  # windows
        start = time.clock()

    trans = pyproj.Proj(init=cs2cs_args)

    # if son path name supplied has no separator at end, put one on
    if sonpath[-1] != os.sep:
        sonpath = sonpath + os.sep

    base = humfile.split('.DAT')  # get base of file name for output
    base = base[0].split(os.sep)[-1]

    # remove underscores, negatives and spaces from basename
    base = humutils.strip_base(base)

    meta = loadmat(os.path.normpath(os.path.join(sonpath, base + 'meta.mat')))

    esi = np.squeeze(meta['e'])
    nsi = np.squeeze(meta['n'])

    pix_m = np.squeeze(meta['pix_m']) * 1.1
    dep_m = np.squeeze(meta['dep_m'])
    c = np.squeeze(meta['c'])
    #dist_m = np.squeeze(meta['dist_m'])

    theta = np.squeeze(meta['heading']) / (180 / np.pi)

    # load memory mapped scans
    shape_port = np.squeeze(meta['shape_port'])
    if shape_port != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_port_lar.dat'))):
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_lar.dat',
                                       'float32', tuple(shape_port))
        else:
            port_fp = io.get_mmap_data(sonpath, base, '_data_port_la.dat',
                                       'float32', tuple(shape_port))

    shape_star = np.squeeze(meta['shape_star'])
    if shape_star != '':
        if os.path.isfile(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_star_lar.dat'))):
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_lar.dat',
                                       'float32', tuple(shape_star))
        else:
            star_fp = io.get_mmap_data(sonpath, base, '_data_star_la.dat',
                                       'float32', tuple(shape_star))

    if len(shape_star) > 2:
        shape = shape_port.copy()
        shape[1] = shape_port[1] + shape_star[1]
        class_fp = io.get_mmap_data(sonpath, base, '_data_class.dat',
                                    'float32', tuple(shape))
        #with open(os.path.normpath(os.path.join(sonpath,base+'_data_class.dat')), 'r') as ff:
        #   class_fp = np.memmap(ff, dtype='float32', mode='r', shape=tuple(shape))
    else:
        with open(
                os.path.normpath(
                    os.path.join(sonpath, base + '_data_class.dat')),
                'rb') as ff:  # binary mode so the array can be read back
            class_fp = np.load(ff)

    tvg = ((8.5 * 10**-5) + (3 / 76923) + ((8.5 * 10**-5) / 4)) * c
    dist_tvg = ((np.tan(np.radians(25))) * dep_m) - (tvg)

    if len(shape_star) > 2:
        for p in range(len(class_fp)):

            e = esi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
            n = nsi[shape_port[-1] * p:shape_port[-1] * (p + 1)]
            t = theta[shape_port[-1] * p:shape_port[-1] * (p + 1)]
            d = dist_tvg[shape_port[-1] * p:shape_port[-1] * (p + 1)]

            len_n = len(n)

            merge = class_fp[p].copy()

            merge[np.isnan(merge)] = 0
            merge[np.isnan(np.vstack((np.flipud(port_fp[p]), star_fp[p])))] = 0

            extent = shape_port[1]
            R1 = merge[extent:, :]
            R2 = np.flipud(merge[:extent, :])

            merge = np.vstack((R2, R1))
            del R1, R2

            # get number pixels in scan line
            extent = int(np.shape(merge)[0] / 2)

            yvec = np.linspace(pix_m, extent * pix_m, extent)

            X, Y = getXY(e, n, yvec, d, t, extent)

            merge[merge == 0] = np.nan

            if len(merge.flatten()) != len(X):
                merge = merge[:, :len_n]

            merge = merge.T.flatten()

            index = np.where(np.logical_not(np.isnan(merge)))[0]

            X, Y, merge = trim_xys(X, Y, merge, index)

            X = X.astype('float32')
            Y = Y.astype('float32')
            merge = merge.astype('float32')

            # write raw bs to file
            outfile = os.path.normpath(
                os.path.join(sonpath, 'x_y_class' + str(p) + '.asc'))
            with open(outfile, 'w') as f:
                np.savetxt(f,
                           np.hstack((humutils.ascol(X), humutils.ascol(Y),
                                      humutils.ascol(merge))),
                           delimiter=' ',
                           fmt="%8.6f %8.6f %8.6f")

            humlon, humlat = trans(X, Y, inverse=True)

            #if dogrid==1:

            orig_def, targ_def, grid_x, grid_y, res, shape = get_griddefs(
                np.min(X), np.max(X), np.min(Y), np.max(Y), res, humlon,
                humlat, trans)

            grid_x = grid_x.astype('float32')
            grid_y = grid_y.astype('float32')

            sigmas = 1  #m
            eps = 2
            dat, res = get_grid(mode, orig_def, targ_def, merge, res * 10,
                                np.min(X), np.max(X), np.min(Y), np.max(Y),
                                res, nn, sigmas, eps, shape, numstdevs, trans,
                                humlon, humlat)

            del merge

            dat[dat == 0] = np.nan
            dat[np.isinf(dat)] = np.nan

            datm = np.ma.masked_invalid(dat)
            del dat

            glon, glat = trans(grid_x, grid_y, inverse=True)
            del grid_x, grid_y

            vmin = np.nanmin(datm) + 0.1
            vmax = np.nanmax(datm) - 0.1
            if vmin > vmax:
                vmin = np.nanmin(datm)
                vmax = np.nanmax(datm)

            print_map(cs2cs_args,
                      glon,
                      glat,
                      datm,
                      sonpath,
                      p,
                      vmin=vmin,
                      vmax=vmax)

    else:  #just 1 chunk

        e = esi
        n = nsi
        t = theta
        d = dist_tvg

        len_n = len(n)

        merge = class_fp.copy()

        merge[np.isnan(merge)] = 0
        merge[np.isnan(np.vstack((np.flipud(port_fp), star_fp)))] = 0

        extent = shape_port[0]
        R1 = merge[extent:, :]
        R2 = np.flipud(merge[:extent, :])

        merge = np.vstack((R2, R1))
        del R1, R2

        # get number pixels in scan line
        extent = int(np.shape(merge)[0] / 2)

        yvec = np.linspace(pix_m, extent * pix_m, extent)

        X, Y = getXY(e, n, yvec, d, t, extent)

        merge[merge == 0] = np.nan

        if len(merge.flatten()) != len(X):
            merge = merge[:, :len_n]

        merge = merge.T.flatten()

        index = np.where(np.logical_not(np.isnan(merge)))[0]

        X, Y, merge = trim_xys(X, Y, merge, index)

        # write raw bs to file
        outfile = os.path.normpath(
            os.path.join(sonpath, 'x_y_class' + str(0) + '.asc'))
        with open(outfile, 'w') as f:
            np.savetxt(f,
                       np.hstack((humutils.ascol(X), humutils.ascol(Y),
                                  humutils.ascol(merge))),
                       delimiter=' ',
                       fmt="%8.6f %8.6f %8.6f")

        humlon, humlat = trans(X, Y, inverse=True)

        #if dogrid==1:
        if 2 > 1:

            orig_def, targ_def, grid_x, grid_y, res, shape = get_griddefs(
                np.min(X), np.max(X), np.min(Y), np.max(Y), res, humlon,
                humlat, trans)

            ## create mask for where the data is not
            tree = KDTree(np.c_[X.flatten(), Y.flatten()])

            if pykdtree == 1:
                dist, _ = tree.query(np.c_[grid_x.ravel(),
                                           grid_y.ravel()],
                                     k=1)
            else:
                try:
                    dist, _ = tree.query(np.c_[grid_x.ravel(),
                                               grid_y.ravel()],
                                         k=1,
                                         n_jobs=cpu_count())
                except:
                    #print ".... update your scipy installation to use faster kd-tree queries"
                    dist, _ = tree.query(np.c_[grid_x.ravel(),
                                               grid_y.ravel()],
                                         k=1)

            dist = dist.reshape(grid_x.shape)

            sigmas = 1  #m
            eps = 2
            dat, res = get_grid(mode, orig_def, targ_def, merge, res * 10,
                                np.min(X), np.max(X), np.min(Y), np.max(Y),
                                res, nn, sigmas, eps, shape, numstdevs, trans,
                                humlon, humlat)

            del merge

        #if dogrid==1:
        if 2 > 1:
            dat[dat == 0] = np.nan
            dat[np.isinf(dat)] = np.nan
            dat[dist > res * 2] = np.nan
            del dist

            datm = np.ma.masked_invalid(dat)

            glon, glat = trans(grid_x, grid_y, inverse=True)
            del grid_x, grid_y

        vmin = np.nanmin(datm) + 0.1
        vmax = np.nanmax(datm) - 0.1
        if vmin > vmax:
            vmin = np.nanmin(datm)
            vmax = np.nanmax(datm)

        Parallel(n_jobs=2, verbose=0)(delayed(doplots)(k,
                                                       humlon,
                                                       humlat,
                                                       cs2cs_args,
                                                       glon,
                                                       glat,
                                                       datm,
                                                       sonpath,
                                                       0,
                                                       vmin=vmin,
                                                       vmax=vmax)
                                      for k in range(2))

        #print_map(cs2cs_args, glon, glat, datm, sonpath, 0, vmin=vmin, vmax=vmax)

        #print_contour_map(cs2cs_args, humlon, humlat, glon, glat, datm, sonpath, 0, vmin=vmin, vmax=vmax)

    if os.name == 'posix':  # true if linux/mac
        elapsed = (time.time() - start)
    else:  # windows
        elapsed = (time.clock() - start)
    print("Processing took " + str(elapsed) + "seconds to analyse")

    print("Done!")
    print("===================================================")