def runAll(rct_data_dir):
	outputFile = open("./results.csv", 'w')
	outputFile.write("Run,Collar,Collar Present,Median Filter Success,Found Collar,Failure Mode,leastsq iterations,leastsq Termination,Good Estimation,Score\n")
	for run_name in sorted(os.listdir(rct_data_dir)):
		# os.listdir returns bare names; join with the data dir before testing.
		run_dir = os.path.join(rct_data_dir, run_name)
		if os.path.isdir(run_dir):
			run_num = int(os.path.basename(run_dir).split('_')[1])
			collarDefinitionFile = os.path.join(run_dir, 'COL')
			if not os.path.isfile(collarDefinitionFile):
				print("Collar definitions not found!")
				continue
			num_Collars = file_len(collarDefinitionFile)
			for collarNumber in xrange(1, num_Collars + 1):
				results = checkRun(run_dir, collarNumber)

				groundTruth = read_meta_file(os.path.join(run_dir, 'TRUTH'), str(collarNumber))
				# Run Number, collar number, ground truth, medianFilter, CollarFound
				outputFile.write('%d,%d,%s,' % (run_num, collarNumber, groundTruth))
				if results[0]:
					outputFile.write('1,')
				else:
					outputFile.write('0,')
				if results[1]:
					outputFile.write('1,')
				else:
					outputFile.write('0,')
				if results[1]:
					# empty, leastsq iterations, leastsq term
					outputFile.write(',%d,%d,,' % (results[2], results[3]))
				else:
					# Failure mode
					outputFile.write('%s,,,,' % (results[4]))
				outputFile.write('\n')
	outputFile.close()
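# runAll relies on two helpers that are not shown in this example: file_len,
# which counts the collar definitions in the COL file, and read_meta_file,
# which looks up a key in a metadata file such as TRUTH.  A minimal sketch,
# assuming one entry per line in a simple "key: value" layout (an assumption,
# not the project's actual code):
def file_len(path):
    # Number of lines in the file (one collar definition per line).
    with open(path) as f:
        return sum(1 for _ in f)

def read_meta_file(path, key):
    # Return the value stored for `key`, or None if the key is absent.
    with open(path) as f:
        for line in f:
            if ':' not in line:
                continue
            k, v = line.split(':', 1)
            if k.strip() == key:
                return v.strip()
    return None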
Example #2
def runAll(rct_data_dir):
    outputFile = open("./results.csv", 'w')
    outputFile.write(
        "Run,Collar,Collar Present,Median Filter Success,Found Collar,Failure Mode,leastsq iterations,leastsq Termination,Good Estimation,Score\n"
    )
    for run_name in sorted(os.listdir(rct_data_dir)):
        # os.listdir returns bare names; join with the data dir before testing.
        run_dir = os.path.join(rct_data_dir, run_name)
        if os.path.isdir(run_dir):
            run_num = int(os.path.basename(run_dir).split('_')[1])
            collarDefinitionFile = os.path.join(run_dir, 'COL')
            if not os.path.isfile(collarDefinitionFile):
                print("Collar definitions not found!")
                continue
            num_Collars = file_len(collarDefinitionFile)
            for collarNumber in xrange(1, num_Collars + 1):
                results = checkRun(run_dir, collarNumber)

                groundTruth = read_meta_file(os.path.join(run_dir, 'TRUTH'),
                                             str(collarNumber))
                # Run Number, collar number, ground truth, medianFilter, CollarFound
                outputFile.write('%d,%d,%s,' %
                                 (run_num, collarNumber, groundTruth))
                if results[0]:
                    outputFile.write('1,')
                else:
                    outputFile.write('0,')
                if results[1]:
                    outputFile.write('1,')
                else:
                    outputFile.write('0,')
                if results[1]:
                    # empty, leastsq iterations, leastsq term
                    outputFile.write(',%d,%d,,' % (results[2], results[3]))
                else:
                    # Failure mode
                    outputFile.write('%s,,,,' % (results[4]))
                outputFile.write('\n')
    outputFile.close()
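# Both versions of runAll assume a checkRun helper whose return tuple lines up
# with the CSV columns written above.  A skeleton consistent with that indexing
# (the body is a placeholder, not the project's real analysis):
def checkRun(run_dir, collarNumber):
    # Expected layout of the returned tuple:
    #   [0] median filter succeeded (bool)
    #   [1] collar found (bool)
    #   [2] leastsq iteration count (valid when [1] is True)
    #   [3] leastsq termination code (valid when [1] is True)
    #   [4] failure mode description (valid when [1] is False)
    medianFilterSuccess = False
    collarFound = False
    iterations, termination = 0, 0
    failureMode = "not implemented"
    return (medianFilterSuccess, collarFound, iterations, termination,
            failureMode)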
def generateGraph(run_num, num_col, filename, output_path, col_def):
    # Get collar frequency
    col_freq = float(read_meta_file(col_def, str(num_col))) / 1.e6

    # make list of columns
    # Expects the csv to have the following columns: time, lat, lon, [collars]
    names = ['time', 'lat', 'lon', 'col', 'alt']

    # Read CSV
    data = np.genfromtxt(filename, delimiter=',', names=names)
    # Modify values
    lat = [x / 1e7 for x in data['lat']]
    lon = [y / 1e7 for y in data['lon']]
    col = data['col']
    alt = data['alt']

    # convert deg to utm
    zone = "X"
    zonenum = 60
    avgCol = np.average(col)
    stdDevCol = np.std(col)
    maxCol = np.amax(col)
    avgAlt = np.average(alt)
    stdAlt = np.std(alt)
    finalCol = []
    finalNorthing = []
    finalEasting = []
    for i in range(len(data['lat'])):
        if math.fabs(alt[i] - avgAlt) > stdAlt:
            continue
        finalCol.append(col[i])
        utm_coord = utm.from_latlon(lat[i], lon[i])
        finalEasting.append(utm_coord[0])
        finalNorthing.append(utm_coord[1])
        zonenum = utm_coord[2]
        zone = utm_coord[3]

    # Calculate heatmap
    print("Collar %d: Building median map..." % num_col)
    margin = 0
    pixelSize = 30 # meters per pixel
    tiffXSize = (int((max(finalEasting)) - int(min(finalEasting)) + margin * 2) / pixelSize + 1)
    tiffYSize = (int((max(finalNorthing)) - int(min(finalNorthing)) + margin * 2) / pixelSize + 1)
    heatMapArea = np.zeros((tiffYSize, tiffXSize)) # [y, x]
    refNorthing = max(finalNorthing) + margin
    minNorthing = refNorthing - (tiffYSize) * pixelSize
    refEasting = min(finalEasting) - margin
    maxEasting = refEasting + tiffXSize * pixelSize
    # print("min northing: %f" % minNorthing)
    # print("max northing: %f" % refNorthing)
    # print("min easting: %f" % refEasting)
    # print("max easting: %f" % maxEasting)
    # Xgeo = refEasting + pixelSize / 2 + Xpix * pixelSize
    # Ygeo = refNorthing - pixelSize / 2 - Ypix * pixelSize

    # Plot data
    detectionRadius = 45
    maxLocation = [0, 0, detectionRadius]
    maxA = -100
    for x in xrange(tiffXSize):
        for y in xrange(tiffYSize):
            xgeo = refEasting + pixelSize / 2.0 + x * pixelSize
            ygeo = refNorthing - pixelSize / 2.0 - y * pixelSize
            medianCol = []
            gridCol = []
            for i in xrange(len(finalCol)):
                if math.fabs(finalEasting[i] - xgeo) < detectionRadius and math.fabs(finalNorthing[i] - ygeo) < detectionRadius:
                    medianCol.append(finalCol[i])
            if len(medianCol) > 15:
                heatMapArea[y][x] = np.median(medianCol)
                if heatMapArea[y][x] > maxA:
                    maxLocation = [xgeo, ygeo, detectionRadius]
                    maxA = heatMapArea[y][x]
            else:
                heatMapArea[y][x] = 100

    # Save plot
    print("Collar %d: Saving median map..." % num_col)
    outputFileName = '%s/RUN_%06d_COL_%06d_median.tiff' % (output_path, run_num, num_col)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(
        outputFileName,
        tiffXSize,
        tiffYSize,
        1,
        gdal.GDT_Float32, ['COMPRESS=LZW'])

    spatialReference = osr.SpatialReference()
    spatialReference.SetUTM(zonenum, zone >= 'N')
    spatialReference.SetWellKnownGeogCS('WGS84')
    wkt = spatialReference.ExportToWkt()
    retval = dataset.SetProjection(wkt)
    dataset.SetGeoTransform((
        refEasting,    # top-left x (easting of the upper-left corner)
        pixelSize,     # pixel width in meters
        0,             # row rotation
        refNorthing,   # top-left y (northing of the upper-left corner)
        0,             # column rotation
        -pixelSize))   # pixel height (negative: rows run southward)
    band = dataset.GetRasterBand(1)
    band.SetNoDataValue(100)
    # print(tiffXSize)
    # print(tiffYSize)
    # print(np.amin(heatMapArea))
    # print(np.amax(heatMapArea))
    # print(np.mean(heatMapArea))
    # print(np.std(heatMapArea))
    # print((heatMapArea > -30).sum())
    band.WriteArray(heatMapArea)
    band.SetStatistics(np.amin(heatMapArea), np.amax(heatMapArea), np.mean(heatMapArea), np.std(heatMapArea))
    dataset.FlushCache()
    dataset = None
    # if maxA > np.amin(heatMapArea):
    #     writer = shapefile.Writer(shapefile.POINT)
    #     writer.autoBalance = 1
    #     writer.field("lat", "F", 20, 18)
    #     writer.field("lon", "F", 20, 18)

    #     for i in xrange(len(finalCol)):
    #         if math.fabs(finalEasting[i] - maxLocation[0]) < maxLocation[2] and math.fabs(finalNorthing[i] - maxLocation[1]) < maxLocation[2]:
    #             lat, lon = utm.to_latlon(finalEasting[i], finalNorthing[i], zonenum, zone)
    #             writer.point(lon, lat)
    #             writer.record(lon, lat)


    #     writer.save('%s/RUN_%06d_COL_%06d_median_sel.shp' % (output_path, run_num, num_col))
    #     proj = open('%s/RUN_%06d_COL_%06d_median_sel.prj' % (output_path, run_num, num_col), "w")
    #     epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    #     proj.write(epsg1)
    #     proj.close()
    if maxA > np.amin(heatMapArea) + 0.5:
        print("Collar %d: Estimated location is %f, %f within %.0f meters" % (num_col, maxLocation[0], maxLocation[1], maxLocation[2]))
        # writer = shapefile.Writer(shapefile.POINT)
        # writer.autoBalance = 1
        # writer.field("lat", "F", 20, 18)
        # writer.field("lon", "F", 20, 18)
        # writer.field("radius", "F", 20, 18)

        # lat, lon = utm.to_latlon(maxLocation[0], maxLocation[1], zonenum, zone)
        # writer.point(lon, lat, maxLocation[2])
        # writer.record(lon, lat, maxLocation[2])


        # writer.save('%s/RUN_%06d_COL_%06d_median_pos.shp' % (output_path, run_num, num_col))
        # proj = open('%s/RUN_%06d_COL_%06d_median_pos.prj' % (output_path, run_num, num_col), "w")
        # epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
        # proj.write(epsg1)
        # proj.close()
        return maxLocation
    else:
        print("Collar %d: no estimated location available!" % (num_col))
        return None
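# A hypothetical two-pass driver: the [easting, northing, radius] list returned
# above matches the startLocation argument taken by the heatmap/least-squares
# variants of generateGraph later in this file.  The wrapper name locateCollar,
# the module names median_map/heatmap, and the csv_file variable are
# assumptions for illustration, not part of the original project.
def locateCollar(run_num, num_col, csv_file, output_path, col_def):
    # First pass: coarse median map; returns None if no estimate is available.
    startLocation = median_map.generateGraph(run_num, num_col, csv_file,
                                             output_path, col_def)
    if startLocation is None:
        return None
    # Second pass: seed the finer pass with the coarse estimate.
    return heatmap.generateGraph(run_num, num_col, csv_file, output_path,
                                 col_def, startLocation=startLocation)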
def generateGraph(run_num,
                  num_col,
                  filename,
                  output_path,
                  col_def,
                  alpha=-0.715,
                  beta=-14.51,
                  mean=0.0306,
                  sigma=6,
                  startLocation=None):
    # Get collar frequency
    col_freq = float(read_meta_file(col_def, str(num_col))) / 1.e6

    # make list of columns
    # Expects the csv to have the following columns: time, lat, lon, [collars]
    names = ['time', 'lat', 'lon', 'col', 'alt']

    # Read CSV
    data = np.genfromtxt(filename, delimiter=',', names=names)
    # Modify values
    lat = [x / 1e7 for x in data['lat']]
    lon = [y / 1e7 for y in data['lon']]
    col = data['col']
    alt = data['alt']

    # convert deg to utm
    zone = "X"
    zonenum = 60
    avgCol = np.average(col)
    stdDevCol = np.std(col)
    maxCol = np.amax(col)
    avgAlt = np.average(alt)
    stdAlt = np.std(alt)
    finalCol = []
    finalNorthing = []
    finalEasting = []
    finalRange = []
    finalAlt = []
    for i in xrange(len(col)):
        utm_coord = utm.from_latlon(lat[i], lon[i])
        lon[i] = utm_coord[0]
        lat[i] = utm_coord[1]
        zonenum = utm_coord[2]
        zone = utm_coord[3]

    # Generate histogram
    knownEmptyCollars = []
    medianCollars = []
    for i in xrange(len(col)):
        if startLocation is not None:
            rangeToMedian = math.sqrt((lon[i] - startLocation[0])**2.0 +
                                      (lat[i] - startLocation[1])**2.0)
            if rangeToMedian > startLocation[2] * 2:
                knownEmptyCollars.append(col[i])
            else:
                medianCollars.append(col[i])
    threshold = -43
    if len(medianCollars) > 0 and len(knownEmptyCollars) > 0:
        threshold = np.amax(knownEmptyCollars)
    print("Collar %d: Using %f threshold" % (num_col, threshold))

    for i in range(len(data['lat'])):
        # if col[i] < avgCol + stdDevCol:
        if col[i] < threshold:
            continue
        if stdAlt < 5:
            if math.fabs(alt[i] - avgAlt) > stdAlt:
                continue
        else:
            if alt[i] < avgAlt - stdAlt:
                continue
        if startLocation is not None:
            rangeToMedian = math.sqrt((lon[i] - startLocation[0])**2.0 +
                                      (lat[i] - startLocation[1])**2.0)
            if rangeToMedian > startLocation[2] * 1.7:
                continue
        finalCol.append(col[i])
        finalEasting.append(lon[i])
        finalNorthing.append(lat[i])
        finalAlt.append(alt[i])
        finalRange.append(10**((alpha * col[i] + beta) / 10.0))
    if len(finalCol) == 0:
        print("Collar %d: No heatmap matches!" % num_col)
        return

    # Calculate heatmap
    print("Collar %d: Building heatmap..." % num_col)
    margin = 50
    tiffXSize = int(max(finalEasting)) - int(min(finalEasting)) + margin * 2
    tiffYSize = int(max(finalNorthing)) - int(min(finalNorthing)) + margin * 2
    pixelSize = 1
    heatMapArea = np.zeros((tiffYSize, tiffXSize))  # [y, x]
    minY = min(finalNorthing) - margin
    refY = max(finalNorthing) + margin
    refX = min(finalEasting) - margin
    maxX = max(finalEasting) + margin
    # print("min northing: %f" % minY)
    # print("max northing: %f" % refY)
    # print("min easting: %f" % refX)
    # print("max easting: %f" % maxX)
    # Xgeo = refX + Xpix
    # Ygeo = refY - Ypix

    # Plot data
    for x in xrange(tiffXSize):
        for y in xrange(tiffYSize):
            for i in xrange(len(finalCol)):
                posRange = math.sqrt((refX + x - finalEasting[i])**2.0 +
                                     (refY - y - finalNorthing[i])**2.0 +
                                     finalAlt[i]**2.0)
                heatMapArea[y][x] = heatMapArea[y][x] + normalProbability(
                    posRange - finalRange[i], mean, 0.4 * finalRange[i])

    # Reshift up
    maxProbability = np.amax(heatMapArea)
    heatMapArea = heatMapArea - maxProbability
    heatMapArea = np.power(10, heatMapArea)

    # Save plot
    print("Collar %d: Saving heatmap..." % num_col)
    outputFileName = '%s/RUN_%06d_COL_%06d_heatmap.tiff' % (output_path,
                                                            run_num, num_col)
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(outputFileName, tiffXSize, tiffYSize, 1,
                            gdal.GDT_Float32, ['COMPRESS=LZW'])

    spatialReference = osr.SpatialReference()
    spatialReference.SetUTM(zonenum, zone >= 'N')
    spatialReference.SetWellKnownGeogCS('WGS84')
    wkt = spatialReference.ExportToWkt()
    retval = dataset.SetProjection(wkt)
    dataset.SetGeoTransform((
        refX,  # top-left x (easting of the upper-left corner)
        1,     # pixel width in meters
        0,     # row rotation
        refY,  # top-left y (northing of the upper-left corner)
        0,     # column rotation
        -1))   # pixel height (negative: rows run southward)
    band = dataset.GetRasterBand(1)
    # band.SetNoDataValue(100)
    # print(tiffXSize)
    # print(tiffYSize)
    # print(np.amin(heatMapArea))
    # print(np.amax(heatMapArea))
    # print(np.mean(heatMapArea))
    # print(np.std(heatMapArea))
    # print((heatMapArea > -30).sum())
    band.WriteArray(heatMapArea)
    band.SetStatistics(np.amin(heatMapArea), np.amax(heatMapArea),
                       np.mean(heatMapArea), np.std(heatMapArea))
    dataset.FlushCache()
    dataset = None
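import math

# normalProbability is not defined in this example.  Because generateGraph sums
# its outputs per pixel and then re-exponentiates with np.power(10, ...), it is
# evidently a base-10 log of a normal density; a minimal sketch under that
# assumption (not the project's actual implementation):
def normalProbability(x, mean, sigma):
    # log10 of the Gaussian pdf N(mean, sigma) evaluated at x.
    return (math.log10(1.0 / (sigma * math.sqrt(2.0 * math.pi)))
            - ((x - mean) ** 2.0) / (2.0 * sigma ** 2.0) * math.log10(math.e))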
def generateGraph(run_num, num_col, filename, output_path, col_def, startLocation = None):
    kml_output = False
    # TODO Fix test case
    plot_height = 6
    plot_width = 8
    plot_dpi = 72


    # Get collar frequency
    col_freq = float(read_meta_file(col_def, str(num_col))) / 1.e6

    # make list of columns
    # Expects the csv to have the following columns: time, lat, lon, [collars]
    names = ['time', 'lat', 'lon', 'col', 'alt']

    # Read CSV
    data = np.genfromtxt(filename, delimiter=',', names=names)
    # Modify values
    lat = [x / 1e7 for x in data['lat']]
    lon = [y / 1e7 for y in data['lon']]
    col = data['col']
    alt = data['alt']

    # convert deg to utm
    print("Collar %d: Loading data" % num_col)
    zone = "X"
    zonenum = 60
    avgCol = np.average(col)
    stdDevCol = np.std(col)
    maxCol = np.amax(col)
    avgAlt = np.median(alt)
    stdAlt = np.std(alt)
    finalCol = []
    finalNorthing = []
    finalEasting = []
    finalAlt = []
    for i in xrange(len(col)):
        utm_coord = utm.from_latlon(lat[i], lon[i])
        lon[i] = utm_coord[0]
        lat[i] = utm_coord[1]
        zonenum = utm_coord[2]
        zone = utm_coord[3]


    altRejectNorthing = []
    altRejectEasting = []
    # if stdDevCol < 2.0:
    if maxCol - (stdDevCol + avgCol) < 1.0:
        print("Collar %d: Not enough variation! No collar!" % num_col)
        return

    # Generate collar threshold
    threshold = 0
    if startLocation is not None:
        knownEmptyCollars = []
        medianCollars = []
        for i in xrange(len(col)):
            rangeToMedian = math.sqrt((lon[i] - startLocation[0]) ** 2.0 + (lat[i] - startLocation[1]) ** 2.0)
            if rangeToMedian > startLocation[2] * 2:
                knownEmptyCollars.append(col[i])
            else:
                medianCollars.append(col[i])
        if len(medianCollars) > 0 and len(knownEmptyCollars) > 0:
            threshold = np.amax(knownEmptyCollars)
    else:
        histogram, edges = np.histogram(col)
        maxInd = np.argmax(histogram)
        maxBin = np.amax(histogram)
        histogramThreshold = 50
        if maxBin < 50:
            histogramThreshold = maxBin * 0.1
        threshold = edges[len(edges) - 1]
        for i in xrange(maxInd + 1, len(histogram)):
            if histogram[i] < histogramThreshold:
                threshold = edges[i + 1]
                break

    print("Collar %d: Using %f threshold" % (num_col, threshold))

    # Generate altitude threshold
    altHistogram, altHistEdges = np.histogram(alt)
    maxAltInd = np.argmax(altHistogram)
    minAltInd = maxAltInd
    for i in xrange(maxAltInd, 0, -1):
        if altHistogram[i] < altHistogram[minAltInd]:
            minAltInd = i


    for i in range(len(data['lat'])):
        # if col[i] < avgCol + stdDevCol:
        if col[i] < threshold:
            continue
        if stdAlt < 5:
            if math.fabs(alt[i] - avgAlt) > stdAlt:
                continue
        else:
            if alt[i] < avgAlt - stdAlt:
                continue
        if startLocation is not None:
            rangeToMedian = math.sqrt((lon[i] - startLocation[0]) ** 2.0 + (lat[i] - startLocation[1]) ** 2.0)
            if rangeToMedian > startLocation[2] * 1.7:
                altRejectEasting.append(lon[i])
                altRejectNorthing.append(lat[i])
                continue
        finalCol.append(col[i])
        finalEasting.append(lon[i])
        finalNorthing.append(lat[i])
        finalAlt.append(alt[i])


    if len(finalCol) == 0:
        print("Collar %d: No matches!" % num_col)
        return

    if np.amax(finalCol) - np.amin(finalCol) < 1:
        print("Collar %d: Not enough variation! No collar!" % num_col)
        return
    print("Collar %d: Collar data range: %f" % (num_col, np.amax(finalCol) - np.amin(finalCol)))

    # writer = shapefile.Writer(shapefile.POINT)
    # writer.autoBalance = 1
    # writer.field("lat", "F", 20, 18)
    # writer.field("lon", "F", 20, 18)
    # writer.field("alt", "F", 20, 18)
    # writer.field("measurement", "F", 18, 18)

    # for i in xrange(len(finalCol)):
    #     #Latitude, longitude, elevation, measurement
    #     lat, lon = utm.to_latlon(finalEasting[i], finalNorthing[i], zonenum, zone)
    #     writer.point(lon, lat, finalAlt[i], finalCol[i])
    #     writer.record(lon, lat, finalAlt[i], finalCol[i])


    # writer.save('%s/RUN_%06d_COL_%06d_pos.shp' % (output_path, run_num, num_col))
    # proj = open('%s/RUN_%06d_COL_%06d_pos.prj' % (output_path, run_num, num_col), "w")
    # epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    # proj.write(epsg1)
    # proj.close()

    # if len(altRejectEasting) > 0:
    #     writer = shapefile.Writer(shapefile.POINT)
    #     writer.autoBalance = 1
    #     writer.field("lat", "F", 20, 18)
    #     writer.field("lon", "F", 20, 18)

    #     for i in xrange(len(altRejectEasting)):
    #         #Latitude, longitude
    #         lat, lon = utm.to_latlon(altRejectEasting[i], altRejectNorthing[i], zonenum, zone)
    #         writer.point(lon, lat)
    #         writer.record(lon, lat)


    #     writer.save('%s/RUN_%06d_COL_%06d_alt_reject.shp' % (output_path, run_num, num_col))
    #     proj = open('%s/RUN_%06d_COL_%06d_alt_reject.prj' % (output_path, run_num, num_col), "w")
    #     epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    #     proj.write(epsg1)
    #     proj.close()

    if len(finalCol) < 6:
        print("Collar %d: No collars detected!" % num_col)
        print("Collar %d: Only %d detections!" % (num_col, len(finalCol)))
        print("Collar %d: Average Collar Measurement: %d" % (num_col, avgCol))
        return

    # Data Analysis
    print("Collar %d: running estimation..." % num_col)
    x0 = [-0.715, -14.51, np.average(finalEasting), np.average(finalNorthing)]
    res_x, res_cov_x, res_infodict, res_msg, res_ier = leastsq(residuals, x0, args=(finalCol, finalEasting, finalNorthing, finalAlt), full_output=1)
    easting = res_x[2]
    northing = res_x[3]
    # print("easting: %f" % easting)
    # print("northing: %f" % northing)
    lat_lon = utm.to_latlon(easting, northing, zonenum, zone_letter=zone)

    print("Collar %d: %d iterations" % (num_col, res_infodict['nfev']))

    if res_x[0] > 0:
        print("Collar %d: Collar model is invalid!" % num_col)
        print(res_x)
        return np.append(res_x, [0, 0, False])


    # if res_ier == 4:
    #     print("Collar %d: No collar detected - falloff not found!" % (num_col))
    #     res_x = np.append(res_x, [0, 0, False])
    #     return res_x
    # if res_ier == 5:
    #     print("Collar %d: No solution found!" % (num_col))
    #     print(res_x)
    #     res_x = np.append(res_x, [0, 0, False])
    #     return res_x
    print("Collar %d: ier %d; %s" % (num_col, res_ier, res_msg))

    print("Collar %d: Saving estimation..." % num_col)
    w = shapefile.Writer(shapefile.POINT)
    w.autoBalance = 1
    w.field("lat", "F", 20, 18)
    w.field("lon", "F", 20, 18)
    w.point(lat_lon[1], lat_lon[0]) #x, y (lon, lat)
    w.record(lat_lon[1], lat_lon[0]) #x, y (lon, lat)
    w.save('%s/RUN_%06d_COL_%06d_est.shp' % (output_path, run_num, num_col))

    prj = open('%s/RUN_%06d_COL_%06d_est.prj' % (output_path, run_num, num_col), "w")
    epsg = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    prj.write(epsg)
    prj.close()

    # if res_cov_x is None:
    #     print("Collar %d: Collar position indeterminate! %s" % (num_col, res_msg))
    #     res_x = np.append(res_x, [0, 0, True])
    #     return res_x
    # s_sq = (residuals(res_x, finalCol, finalEasting, finalNorthing, finalAlt) ** 2).sum() / (len(finalCol) - len(x0))
    # pcov = res_cov_x * s_sq


    # Sigma estimation
    alpha = res_x[0]
    beta = res_x[1]
    errors = []
    for i in xrange(len(finalCol)):
        rangeToEstimate = math.sqrt((finalEasting[i] - easting) ** 2.0 + (finalNorthing[i] - northing) ** 2.0 + finalAlt[i] ** 2.0)
        modelRange = 10 ** ((alpha * finalCol[i] + beta) / 10.0)
        errors.append(rangeToEstimate - modelRange)
    errorSigma = np.std(errors)
    errorMean = np.average(errors)
    res_x = np.append(res_x, [errorMean, errorSigma, True])
    return res_x
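import math
import numpy as np

# residuals is provided elsewhere in the project; the sigma-estimation loop
# above mirrors what it must compute, so a sketch consistent with that loop
# (an assumption, not the project's actual implementation) is:
def residuals(x, col, easting, northing, alt):
    # x = [alpha, beta, est_easting, est_northing].  For each sample, compare
    # the geometric range to the estimated position against the range the
    # log-distance model 10**((alpha * rssi + beta) / 10) predicts.
    alpha, beta, est_e, est_n = x
    out = []
    for i in xrange(len(col)):
        geomRange = math.sqrt((easting[i] - est_e) ** 2.0 +
                              (northing[i] - est_n) ** 2.0 +
                              alt[i] ** 2.0)
        modelRange = 10 ** ((alpha * col[i] + beta) / 10.0)
        out.append(geomRange - modelRange)
    return np.array(out)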
        ]):
            try:
                os.remove(os.path.join(data_dir, curFile))
            except Exception:
                pass
        if fft_flag and any(curFile.lower().endswith(ext) for ext in ['.raw']):
            try:
                os.remove(os.path.join(data_dir, curFile))
            except Exception:
                pass

    # Get run number
    run = -1
    hasRun = False
    if os.path.isfile(runFileName):
        runString = read_meta_file.read_meta_file(runFileName, 'run_num')
        if runString is None:
            runString = getRunNum.getRunNum()
            if runString is None:
                exit()
        run = int(runString)
        runFile = open(runFileName, 'w')
        runFile.write("run_num: %s" % run)
        runFile.close()
        hasRun = True
    else:
        runString = getRunNum.getRunNum()
        if runString is None:
            exit()
        else:
            run = int(runString)
Example #7
			try:
				os.remove(os.path.join(data_dir, curFile))
			except Exception:
				pass
		if fft_flag and any(curFile.lower().endswith(ext) for ext in ['.raw']):
			try:
				os.remove(os.path.join(data_dir, curFile))
			except Exception:
				pass


	# Get run number
	run = -1
	hasRun = False
	if os.path.isfile(runFileName):
		runString = read_meta_file.read_meta_file(runFileName, 'run_num')
		if runString is None:
			runString = getRunNum.getRunNum()
			if runString is None:
				exit()
		run = int(runString)
		runFile = open(runFileName, 'w')
		runFile.write("run_num: %s" % run)
		runFile.close()
		hasRun = True
	else:
		runString = getRunNum.getRunNum()
		if runString is None:
			exit()
		else:
			run = int(runString)
Example #8
def generateGraph(run_num,
                  num_col,
                  filename,
                  output_path,
                  col_def,
                  startLocation=None):
    kml_output = False
    # TODO Fix test case
    plot_height = 6
    plot_width = 8
    plot_dpi = 72

    # Get collar frequency
    col_freq = float(read_meta_file(col_def, str(num_col))) / 1.e6

    # make list of columns
    # Expects the csv to have the following columns: time, lat, lon, [collars]
    names = ['time', 'lat', 'lon', 'col', 'alt']

    # Read CSV
    data = np.genfromtxt(filename, delimiter=',', names=names)
    # Modify values
    lat = [x / 1e7 for x in data['lat']]
    lon = [y / 1e7 for y in data['lon']]
    col = data['col']
    alt = data['alt']

    # convert deg to utm
    print("Collar %d: Loading data" % num_col)
    zone = "X"
    zonenum = 60
    avgCol = np.average(col)
    stdDevCol = np.std(col)
    maxCol = np.amax(col)
    avgAlt = np.median(alt)
    stdAlt = np.std(alt)
    finalCol = []
    finalNorthing = []
    finalEasting = []
    finalAlt = []
    for i in xrange(len(col)):
        utm_coord = utm.from_latlon(lat[i], lon[i])
        lon[i] = utm_coord[0]
        lat[i] = utm_coord[1]
        zonenum = utm_coord[2]
        zone = utm_coord[3]

    altRejectNorthing = []
    altRejectEasting = []
    # if stdDevCol < 2.0:
    if maxCol - (stdDevCol + avgCol) < 1.0:
        print("Collar %d: Not enough variation! No collar!" % num_col)
        return

    # Generate collar threshold
    threshold = 0
    if startLocation is not None:
        knownEmptyCollars = []
        medianCollars = []
        for i in xrange(len(col)):
            rangeToMedian = math.sqrt((lon[i] - startLocation[0])**2.0 +
                                      (lat[i] - startLocation[1])**2.0)
            if rangeToMedian > startLocation[2] * 2:
                knownEmptyCollars.append(col[i])
            else:
                medianCollars.append(col[i])
        if len(medianCollars) > 0 and len(knownEmptyCollars) > 0:
            threshold = np.amax(knownEmptyCollars)
    else:
        histogram, edges = np.histogram(col)
        maxInd = np.argmax(histogram)
        maxBin = np.amax(histogram)
        histogramThreshold = 50
        if maxBin < 50:
            histogramThreshold = maxBin * 0.1
        threshold = edges[len(edges) - 1]
        for i in xrange(maxInd + 1, len(histogram)):
            if histogram[i] < histogramThreshold:
                threshold = edges[i + 1]
                break

    print("Collar %d: Using %f threshold" % (num_col, threshold))

    # Generate altitude threshold
    altHistogram, altHistEdges = np.histogram(alt)
    maxAltInd = np.argmax(altHistogram)
    minAltInd = maxAltInd
    for i in xrange(maxAltInd, 0, -1):
        if altHistogram[i] < altHistogram[minAltInd]:
            minAltInd = i

    for i in range(len(data['lat'])):
        # if col[i] < avgCol + stdDevCol:
        if col[i] < threshold:
            continue
        if stdAlt < 5:
            if math.fabs(alt[i] - avgAlt) > stdAlt:
                continue
        else:
            if alt[i] < avgAlt - stdAlt:
                continue
        if startLocation is not None:
            rangeToMedian = math.sqrt((lon[i] - startLocation[0])**2.0 +
                                      (lat[i] - startLocation[1])**2.0)
            if rangeToMedian > startLocation[2] * 1.7:
                altRejectEasting.append(lon[i])
                altRejectNorthing.append(lat[i])
                continue
        finalCol.append(col[i])
        finalEasting.append(lon[i])
        finalNorthing.append(lat[i])
        finalAlt.append(alt[i])

    if len(finalCol) == 0:
        print("Collar %d: No matches!" % num_col)
        return

    if np.amax(finalCol) - np.amin(finalCol) < 1:
        print("Collar %d: Not enough variation! No collar!" % num_col)
        return
    print("Collar %d: Collar data range: %f" %
          (num_col, np.amax(finalCol) - np.amin(finalCol)))

    # writer = shapefile.Writer(shapefile.POINT)
    # writer.autoBalance = 1
    # writer.field("lat", "F", 20, 18)
    # writer.field("lon", "F", 20, 18)
    # writer.field("alt", "F", 20, 18)
    # writer.field("measurement", "F", 18, 18)

    # for i in xrange(len(finalCol)):
    #     #Latitude, longitude, elevation, measurement
    #     lat, lon = utm.to_latlon(finalEasting[i], finalNorthing[i], zonenum, zone)
    #     writer.point(lon, lat, finalAlt[i], finalCol[i])
    #     writer.record(lon, lat, finalAlt[i], finalCol[i])

    # writer.save('%s/RUN_%06d_COL_%06d_pos.shp' % (output_path, run_num, num_col))
    # proj = open('%s/RUN_%06d_COL_%06d_pos.prj' % (output_path, run_num, num_col), "w")
    # epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    # proj.write(epsg1)
    # proj.close()

    # if len(altRejectEasting) > 0:
    #     writer = shapefile.Writer(shapefile.POINT)
    #     writer.autoBalance = 1
    #     writer.field("lat", "F", 20, 18)
    #     writer.field("lon", "F", 20, 18)

    #     for i in xrange(len(altRejectEasting)):
    #         #Latitude, longitude
    #         lat, lon = utm.to_latlon(altRejectEasting[i], altRejectNorthing[i], zonenum, zone)
    #         writer.point(lon, lat)
    #         writer.record(lon, lat)

    #     writer.save('%s/RUN_%06d_COL_%06d_alt_reject.shp' % (output_path, run_num, num_col))
    #     proj = open('%s/RUN_%06d_COL_%06d_alt_reject.prj' % (output_path, run_num, num_col), "w")
    #     epsg1 = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    #     proj.write(epsg1)
    #     proj.close()

    if len(finalCol) < 6:
        print("Collar %d: No collars detected!" % num_col)
        print("Collar %d: Only %d detections!" % (num_col, len(finalCol)))
        print("Collar %d: Average Collar Measurement: %d" % (num_col, avgCol))
        return

    # Data Analysis
    print("Collar %d: running estimation..." % num_col)
    x0 = [
        -0.715, -14.51,
        np.average(finalEasting),
        np.average(finalNorthing)
    ]
    res_x, res_cov_x, res_infodict, res_msg, res_ier = leastsq(
        residuals,
        x0,
        args=(finalCol, finalEasting, finalNorthing, finalAlt),
        full_output=1)
    easting = res_x[2]
    northing = res_x[3]
    # print("easting: %f" % easting)
    # print("northing: %f" % northing)
    lat_lon = utm.to_latlon(easting, northing, zonenum, zone_letter=zone)

    print("Collar %d: %d iterations" % (num_col, res_infodict['nfev']))

    if res_x[0] > 0:
        print("Collar %d: Collar model is invalid!" % num_col)
        print(res_x)
        return np.append(res_x, [0, 0, False])

    # if res_ier == 4:
    #     print("Collar %d: No collar detected - falloff not found!" % (num_col))
    #     res_x = np.append(res_x, [0, 0, False])
    #     return res_x
    # if res_ier == 5:
    #     print("Collar %d: No solution found!" % (num_col))
    #     print(res_x)
    #     res_x = np.append(res_x, [0, 0, False])
    #     return res_x
    print("Collar %d: ier %d; %s" % (num_col, res_ier, res_msg))

    print("Collar %d: Saving estimation..." % num_col)
    w = shapefile.Writer(shapefile.POINT)
    w.autoBalance = 1
    w.field("lat", "F", 20, 18)
    w.field("lon", "F", 20, 18)
    w.point(lat_lon[1], lat_lon[0])  #x, y (lon, lat)
    w.record(lat_lon[1], lat_lon[0])  #x, y (lon, lat)
    w.save('%s/RUN_%06d_COL_%06d_est.shp' % (output_path, run_num, num_col))

    prj = open(
        '%s/RUN_%06d_COL_%06d_est.prj' % (output_path, run_num, num_col), "w")
    epsg = 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
    prj.write(epsg)
    prj.close()

    # if res_cov_x is None:
    #     print("Collar %d: Collar position indeterminate! %s" % (num_col, res_msg))
    #     res_x = np.append(res_x, [0, 0, True])
    #     return res_x
    # s_sq = (residuals(res_x, finalCol, finalEasting, finalNorthing, finalAlt) ** 2).sum() / (len(finalCol) - len(x0))
    # pcov = res_cov_x * s_sq

    # Sigma estimation
    alpha = res_x[0]
    beta = res_x[1]
    errors = []
    for i in xrange(len(finalCol)):
        rangeToEstimate = math.sqrt((finalEasting[i] - easting)**2.0 +
                                    (finalNorthing[i] - northing)**2.0 +
                                    finalAlt[i]**2.0)
        modelRange = 10**((alpha * finalCol[i] + beta) / 10.0)
        errors.append(rangeToEstimate - modelRange)
    errorSigma = np.std(errors)
    errorMean = np.average(errors)
    res_x = np.append(res_x, [errorMean, errorSigma, True])
    return res_x
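# Hypothetical caller-side use of the returned estimate (csv_file is an assumed
# variable name).  The vector packs [alpha, beta, easting, northing,
# error_mean, error_sigma, valid_flag]; the bare `return` paths above yield
# None instead.
est = generateGraph(run_num, num_col, csv_file, output_path, col_def)
if est is not None and est[6]:
    print("Collar %d: estimate at (%.1f, %.1f) E/N, range error %.1f +/- %.1f m"
          % (num_col, est[2], est[3], est[4], est[5]))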