Example #1
0
import os
import sys

sys.path.append('/home/laser/rawdata/www/lib')
from Laser.Util import las

if __name__ == '__main__':
    """ fix incorrect return number and number of returns for given pulse syntax """
    # command line interface: --ala names the input file, --out the cleaned output
    import argparse
    parser = argparse.ArgumentParser(description='fix incorrect syntax for echoes in .ala files')
    parser.add_argument('--ala', dest='ala', required=True, help='path to input .ala file')
    parser.add_argument('--out', dest='out', required=True, help='path to cleaned output file')
    args = parser.parse_args()

    # init utility library
    util = las.rawdata()

    # open output file
    # NOTE(review): opened without a context manager and never closed in the
    # visible lines — confirm the full script closes it (or use `with`).
    o = open(args.out,'w')

    # loop through input file, read pairs of line and clean up
    with open(args.ala) as f:
        prev_line = None
        curr_line = None
        for line in f:
            if not prev_line:
                # first line of a pair: parse it and wait for its partner
                prev_line = util.parse_line(line)
                continue
            else:
                # second line of a pair
                # NOTE(review): the block appears truncated here — the pair
                # handling that should follow curr_line is not visible in this view.
                curr_line = util.parse_line(line)
Example #2
0
def merge_first_last(first,last,out,dist):
    """ Merge a first-echo (.alf) and a last-echo (.all) file into one output file.

    Each input is tagged with a helper column right after the GPS-time
    (1 = first echo, 2 = last echo), the two are concatenated and sorted so
    that echo pairs sharing a GPS-time become adjacent, and every echo is
    labelled with return number / number of returns.  Exact duplicates and
    near-duplicates (same GPS-time, within `dist` meters in 3D) collapse to
    a single echo.  A "<out>.txt" log with statistics is written alongside.

    first -- path to the first-echo input file (.alf)
    last  -- path to the last-echo input file (.all)
    out   -- path of the merged output file; "<out>.tmp" / "<out>.txt" are derived
    dist  -- 3D distance threshold in meters for treating echoes as duplicates
    """

    # previous/current parsed lines while walking the sorted merge
    prev_line = None
    curr_line = None

    # counters reported in the log file
    stats = {
        'kept_first' : 0,
        'kept_last' : 0,
        'duplicated' : 0,
        'duplicated_distance' : 0,
    }

    # init utility library
    util = las.rawdata()

    # step 1: tag each echo with a helper sort column (1 = first, 2 = last),
    # then merge and sort by GPS-time so pairs become adjacent with the
    # first echo sorting before the last echo.
    # NOTE(review): file paths are interpolated into shell commands; callers
    # must guarantee they contain no shell metacharacters (or this should be
    # migrated to subprocess.run with shell=False).
    os.system("""awk '$1 = $1 FS "1"' %s > %s.tmp""" % (first,first))
    os.system("""awk '$1 = $1 FS "2"' %s > %s.tmp""" % (last,last))
    os.system("""cat %s.tmp %s.tmp | sort > %s.tmp""" % (first,last,out))

    # step 2: walk the sorted merge, drop (supposed) duplicates and write the
    # final output with return number / number of returns appended
    with open(out,'w') as o:
        with open("%s.tmp" % out) as f:
            for line in f:
                if not prev_line:
                    prev_line = util.parse_line(line)
                    # placeholder columns for return number / number of returns
                    prev_line.extend(['9','9'])
                    continue

                curr_line = util.parse_line(line)
                curr_line.extend(['9','9'])

                # same GPS-time: this is a first/last echo pair
                if prev_line[0] == curr_line[0]:
                    # previous echo is return 1 of 2, current is return 2 of 2
                    prev_line[-2] = '1'
                    prev_line[-1] = '2'
                    curr_line[-2] = '2'
                    curr_line[-1] = '2'

                    # remove helper sort column (1 for .alf, 2 for .all files)
                    del prev_line[1]
                    del curr_line[1]

                    if lines_are_identical(curr_line,prev_line):
                        # exact duplicate: keep a single echo, relabelled 1 of 1
                        prev_line[-1] = '1'
                        o.write('%s\n' % ' '.join(prev_line))
                        stats['kept_first'] += 1
                        stats['duplicated'] += 1
                    elif lines_are_supposed_to_be_identical(curr_line,prev_line,dist):
                        # near-duplicate within dist: keep the last echo, relabelled 1 of 1
                        curr_line[-2] = '1'
                        curr_line[-1] = '1'
                        o.write('%s\n' % ' '.join(curr_line))
                        stats['kept_first'] += 1
                        stats['duplicated_distance'] += 1
                    else:
                        # genuine pair: keep both echoes
                        o.write('%s\n' % ' '.join(prev_line))
                        o.write('%s\n' % ' '.join(curr_line))
                        stats['kept_first'] += 1
                        stats['kept_last'] += 1

                    # pair fully handled: start fresh with the next line
                    prev_line = None
                    continue

                # different GPS-time: the previous echo has no partner
                if prev_line[1] == '1':
                    # lone first echo: return 1 of 1
                    prev_line[-2] = '1'
                    prev_line[-1] = '1'
                    stats['kept_first'] += 1
                elif prev_line[1] == '2':
                    # lone last echo: reveals an error in .all files containing a
                    # second echo with no matching first echo in the .alf file.
                    # Kept as-is so first/last echo counts in PDF reports stay
                    # accurate for merged .ala files as well.
                    prev_line[-2] = '2'
                    prev_line[-1] = '2'
                    stats['kept_last'] += 1

                # remove helper sort column and write the lone echo
                del prev_line[1]
                o.write('%s\n' % ' '.join(prev_line))

                # current line becomes the next previous line
                prev_line = curr_line[:]

        # write last record from the loop if any
        if prev_line:
            # BUGFIX: this record was previously written with its '9 9'
            # placeholder columns intact; label it like any other lone echo.
            if prev_line[1] == "1":
                prev_line[-2] = '1'
                prev_line[-1] = '1'
                stats['kept_first'] += 1
            else:
                prev_line[-2] = '2'
                prev_line[-1] = '2'
                stats['kept_last'] += 1

            # remove helper sort column (1 for .alf, 2 for .all files)
            del prev_line[1]
            o.write('%s\n' % ' '.join(prev_line))

    # create log file describing how the output was produced
    with open("%s.txt" % out, "w") as log:
        log.write("the corresponding file was created with %s\n" % __file__)
        log.write("it contains filtered echoes from merged first and last echo files\n")
        log.write("duplicated points and points with identical GPS-time and intensities within a 3D-distance of %s meters have been removed\n" % dist)
        log.write("\n")
        log.write("input file with first echoes:   %s\n" % first)
        log.write("input file with last echoes:    %s\n" % last)
        log.write("output file with merged echoes: %s\n" % out)
        log.write("minimum point distance:         %s meters\n" % dist)
        log.write("\n")
        log.write("kept returns (all, 1, 2):       %s %s %s \n" % (stats['kept_first']+stats['kept_last'],stats['kept_first'],stats['kept_last']) )
        log.write("skipped real duplicates:        %s\n" % stats['duplicated'])
        log.write("skipped distance duplicates:    %s\n" % stats['duplicated_distance'])
        log.write("\n")

    # clean up temporary files
    os.remove("%s.tmp" % first)
    os.remove("%s.tmp" % last)
    os.remove("%s.tmp" % out)