profile_dict['image2_snowIndex'] = ndsi2mean # add mean snow index profile for image 2
									
				profiles.append(profile_dict) # add to list

	# Translate timeslices across space (profiles) to timeseries across time at a point (pts)
	pts = wgt.convertProfilesListToPointTimeseries(profiles)

	# Collect all transect data to write out
	timeNow = dt.datetime.now()
	profile_data={ 'sample_pts_lon_lat':sample_pts_lon_lat, 'sample_pts_PS':sample_pts_PS, 'sample_pts_frontdist':sample_pts_frontdist.tolist(),\
					'sampled_on_date_iso':timeNow.isoformat(), 'profile_shapefile':profile_shapefile, 'profile_shapefile_dir':profile_shapefile_dir,\
					'profile_shapefile_projection_Proj':originalSrsProj, 'profiles':profiles, 'pts':pts, 'transect_name':transName }

	# Write transect velocity data to json file
	jsonFnOut = transName + '_swathVelocitySampling_' + timeNow.strftime("%Y-%m-%d") + '.json'
	wgt.writeJsonFile(profile_data,jsonFnOut)
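
# Sketch (assumption, not the toolbox implementation): the profiles -> pts step
# above transposes one-dict-per-date profiles into one-dict-per-point timeseries.
# The key names used here ('speed', 'mid_date') are hypothetical placeholders.
def profilesToPointTimeseriesSketch(profileList):
	nPts = len(profileList[0]['speed']) # number of samples along the transect
	ptsOut = []
	for ptInd in range(nPts):
		ptsOut.append({'mid_dates':[p['mid_date'] for p in profileList],
						'speeds':[p['speed'][ptInd] for p in profileList]})
	return ptsOut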


## ANALYSIS AND PLOTTING

for featNum in range(0,lyr.GetFeatureCount()): # iterate over all transects
	# hard-coded feature indices below kept for reference from single-transect testing
	#feat = lyr.GetFeature(59) # kaskWB if using the epsg102006 transects
	#feat = lyr.GetFeature(19) # kaskWB if using the st elias utm7n transects
	#feat = lyr.GetFeature(8) # nabeEB if using the wrangells utm7n transects
	#featNum = 17 # nabeEB if using new WStE transects
	feat = lyr.GetFeature(featNum) # get feature
	lineGeom = feat.geometry() # get geometry of profile line
	transName = feat.GetField(1) # transect name
	
	# Get json fn (assumes only one in this directory for each transect)
	summerTiming = {'doys':suDoys,'midDates':suMidDates,'startDates':suStarts,'endDates':suEnds,'dDays':suDts}
	winterTiming = {'doys':wiDoys,'midDates':wiMidDates,'startDates':wiStarts,'endDates':wiEnds,'dDays':wiDts}	
					
	# Save to dict for writing out
	velocityChangeDict[transName] = speedupDictNow
	velocityChangeDict2[transName] = speedupDictNow2
	allSeasonalVelocities[transName]=seasonalVelocityDictToJson
	allGlacierTiming[transName]={'summer':summerTiming,'winter':winterTiming}
	# Plot seasonal velocity data
	#wgt.plotSeasonalVelocity(seasonalVelocityDict2,speedupDictNow2,termDist,1,1)
	#wgt.plotSeasonalSpeedup(speedupDictNow2,termDist,1,1)	


# Write timing out
jsonFnOut0 = 'timingDict_fullyAutomated_27jan2017.json'
wgt.writeJsonFile(allGlacierTiming,jsonFnOut0)
	
# Write speedup data out
jsonFnOut1 = 'speedupDict_fullyAutomated_28nov2016.json'
jsonFnOut2 = 'speedupDict_2iterTempFilter_fullyAutomated_28nov2016.json'
wgt.writeJsonFile(velocityChangeDict,jsonFnOut1)
wgt.writeJsonFile(velocityChangeDict2,jsonFnOut2)


# Write seasonal velocity out
jsonFnOut3 = 'seasonalVelocityProfiles_2iterTempFilter_fullyAutomated_24jan2017.json'
wgt.writeJsonFile(allSeasonalVelocities,jsonFnOut3)


# Define speedup distance
from scipy.stats import ranksums
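
# Sketch (assumption): one way to define the along-flow extent of speedup is a
# Wilcoxon rank-sum test at each sample point, comparing the summer and winter
# velocity populations. The argument names (summerSpeedsByPoint,
# winterSpeedsByPoint) are illustrative placeholders, not toolbox variables.
def speedupExtentSketch(summerSpeedsByPoint, winterSpeedsByPoint, alpha=0.05):
	speedupFlags = []
	for suList, wiList in zip(summerSpeedsByPoint, winterSpeedsByPoint):
		su = np.asarray(suList, dtype=float)
		wi = np.asarray(wiList, dtype=float)
		stat, pval = ranksums(su[~np.isnan(su)], wi[~np.isnan(wi)]) # test summer vs. winter speeds
		speedupFlags.append(int(pval < alpha and stat > 0)) # 1 where summer is significantly faster
	return speedupFlags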
	# 		plt.text(-.19,-.13,'UG rmse: ' + "{0:.3f}".format(rmseUp))				
	# 		plt.text(-.19,-.09,'DG r2: ' + "{0:.3f}".format(r2down))
	# 		plt.text(-.19,-0.03,'DG rmse: ' + "{0:.3f}".format(rmseDown))						
			plt.xlim((-.2,.2))
			plt.ylim((-.2,.2))	
			plt.xlabel('Observed speedup [m/d]',fontsize=16)
			plt.ylabel('Best linear \n fit speedup [m/d]',fontsize=16)				
			plt.savefig(transNow + '_quantifySpeedupPlot_relToSnowline.pdf')
			#plt.show()
			plt.close()				
		
	except KeyError:
		print "Key error on " + transNow
		ranksumExists = 0
		
wgt.writeJsonFile(quantifySpeedupDict,'quantifySpeedupDict_withSnowline_handTreated_07feb2017.json')


# Look at all glaciers together
#data = wgt.readJsonFile('/Users/wiar9509/Google Drive/wrangells/json/quantifySpeedupDict_07feb2017.json')

qSpeed = wgt.readJsonFile('/Users/wiar9509/Google Drive/wrangells/json/quantifySpeedupDict_withSnowline_handTreated_07feb2017.json')
# initialize
transNameList = []
speedMagListUp = []
speedMagListDown = []
speedSlopeListUp = []
speedSlopeListDown = []

color_idx = np.linspace(0, 1, len(qSpeed))
cols = plt.cm.Paired(color_idx)
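
# Sketch (assumption): the lists initialized above are presumably filled by
# looping over the loaded dictionary; the per-transect value keys are not shown
# in this snippet, so only the transect names are collected here.
for transNameNow in qSpeed.keys():
	transNameList.append(transNameNow)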
	interpProfileCoords = wgt.readVertexCoordsFromPolyline(transName+'_swathSamplePoints.shp') # read vertex coordinates from interpolated line

	stats = wgt.zonal_stats(outDirectory + transName+'_samplePolygons.shp', albersDemFn) # swath profile along transect
	zStats = wgt.readZonalStatsOutput(stats) # read swath profile output
	
	dz = np.diff(zStats['means'])
	dx = np.diff(dist)
	
	alpha = np.nan
	alpha = np.append(alpha,dz/dx)
		
	elevDict[transName] = {'dist':dist.tolist(),'slope':alpha.tolist(),'elevMean':zStats['means'],'elevMin':zStats['mins'],'elevMax':zStats['maxs'],'elevStd':zStats['stds'],'elevCount':zStats['counts']}
	
# Write transect elevation data to json file
jsonFnOut = os.path.split(albersCenterlinesFn)[1][:-4] + '_elevationProfiles.json'
wgt.writeJsonFile(elevDict,jsonFnOut)	
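
# Sketch (assumption): 'slope' above is stored as rise over run (dz/dx, dimensionless);
# it can be expressed in degrees with numpy, e.g. for the last transect processed:
slopeDeg = np.degrees(np.arctan(np.array(elevDict[transName]['slope'])))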


## PLOTTING ELEVATION PROFILES
color_idx = np.linspace(0, 1, len(elevDict))
cols = plt.cm.GnBu(color_idx)
transIter = elevDict.iterkeys()
meanMins = []
meanMeans = []
meanMaxs = []
meanDists = []


for i in range(0,len(elevDict)):
	transName = transIter.next()
	elevNow = elevDict[transName]
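	# Sketch (assumption): the snippet is truncated here; a plausible completion
	# plots each mean elevation profile against distance (in km) in its GnBu color.
	plt.plot(np.array(elevNow['dist'])/1e3, elevNow['elevMean'], color=cols[i])

plt.xlabel('Distance [km]', fontsize=16)
plt.ylabel('Elevation [m]', fontsize=16)
plt.show()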