def readtime(filename):
    metafile = open(filename + ".meta", "r")
    lines = metafile.readlines()
    try:
        if lines[0][0] == 'N':
            # Using nominal date
            time = float(lines[0][15:21])
            interval = float('nan')
        else:
            # Using Central Julian date
            jdate = float(lines[0][36:47])

            # Get date
            year, month, day, fracday = jdcal.jd2gcal(jdate, 0)
            time = datelib.date_to_fracyear(year, month, day + fracday)

            # Get time interval for velocity (time of second image - time of first image)
            month1 = datelib.month(lines[1][32:35])
            day1 = float(lines[1][36:38])
            year1 = float(lines[1][39:43])
            month2 = datelib.month(lines[2][33:36])
            day2 = float(lines[2][37:39])
            year2 = float(lines[2][40:44])
            interval = datelib.date_to_fracyear(year2, month2, day2) - \
                datelib.date_to_fracyear(year1, month1, day1)
    except:
        # Metadata could not be parsed
        time = float('nan')
        interval = float('nan')

    return time, interval
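# Example usage of readtime (illustrative sketch only; the basename below is
# hypothetical and assumes a velocity ".meta" file with the column layout parsed above):
#
#   time, interval = readtime("TSX_velocity_pair_example")
#   # time -> image date as a fractional year, interval -> image-pair separation
#   # in years (both nan if the metadata cannot be parsed)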
def near_time(time, glacier, type='all'):
    '''
    best_x,best_y,best_time = near_time(time,glacier)

    Inputs:
    time: fractional year when we want terminus position
    glacier: glacier name
    type: data source for ice front position (TSX, WV, Landsat, or all)

    Outputs:
    best_x,best_y: x,y coordinates for terminus position that is closest in time to "time"
    best_time: time of that terminus position
    '''

    # Ice front directory
    DIRI = os.path.join(os.getenv("DATA_HOME"),
                        "ShapeFiles/IceFronts/" + glacier + "/")
    files = os.listdir(DIRI)

    if type == 'all':
        type = ['TSX', 'Landsat', 'ASTER', 'WV']

    best_time = []
    min_diff = 1.0
    for file in files:
        if file.endswith('.shp') and (not "moon" in file):
            # Time of that terminus position
            icetime = None
            if ("TSX" in file) and ("TSX" in type):
                icetime = datelib.doy_to_fracyear(float(file[0:4]),
                                                  float(file[5:8]))
            elif ("ASTER" in file) and ("ASTER" in type):
                icetime = datelib.date_to_fracyear(float(file[0:4]),
                                                   float(file[5:7]),
                                                   float(file[8:10]))
            elif ("Landsat" in file) and ("Landsat" in type):
                icetime = datelib.date_to_fracyear(float(file[0:4]),
                                                   float(file[5:7]),
                                                   float(file[8:10]))
            elif ("WV" in file) and ("WV" in type):
                icetime = datelib.date_to_fracyear(float(file[0:4]),
                                                   float(file[5:7]),
                                                   float(file[8:10]))
            if (icetime is not None) and (abs(icetime - time) < min_diff):
                best_x = np.zeros(0)
                best_y = np.zeros(0)
                min_diff = abs(icetime - time)
                sf = shapefile.Reader(DIRI + file)
                shapes = sf.shapes()
                for shape in shapes:
                    try:
                        termpts = np.array(shape.points[:])
                        best_x = np.r_[best_x, termpts[:, 0]]
                        best_y = np.r_[best_y, termpts[:, 1]]
                    except:
                        pass
                best_time = icetime

    return best_x, best_y, best_time
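# Example usage of near_time (illustrative sketch; assumes DATA_HOME points at the
# ShapeFiles/IceFronts directory tree described above):
#
#   xterm, yterm, tterm = near_time(2011.5, 'Helheim', type=['TSX', 'WV'])
#   # xterm, yterm -> digitized terminus coordinates closest in time to 2011.5,
#   # tterm -> fractional year of that terminus pick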
def position(x, y, dists, glacier, time):
    '''
    terminus = position(x,y,dists,glacier,time)

    Get the terminus position along the flowline for time "time."

    Inputs:
    x,y,dists: x,y coordinates and their distance along the flowline
    glacier: glacier name
    time: time when we want the terminus position (fractional year or [year,month,day])

    Outputs:
    terminus: distance of terminus position along flowline (using linear
      interpolation from the nearby picked terminus positions)
    '''

    # Get all terminus positions
    terminus_val, terminus_time = distance_along_flowline(
        x, y, dists, glacier, type='icefront')

    # Allow time to be given either as a fractional year or as [year, month, day]
    try:
        if len(time) > 1:
            time = datelib.date_to_fracyear(time[0], time[1], time[2])
    except TypeError:
        # time is already a scalar fractional year
        pass

    # Interpolate terminus position for the time
    terminus = np.interp(time, terminus_time, terminus_val)

    return terminus
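# Example usage of position (illustrative sketch; x, y, dists would normally come
# from glaclib.load_flowline for the chosen glacier):
#
#   terminus = position(x, y, dists, 'Helheim', 2012.0)
#   terminus = position(x, y, dists, 'Helheim', [2012, 1, 15])  # [year, month, day] also works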
def rosenau_landsat_at_pts(xpt, ypt, glacier, xy_velocities='False'):
    if glacier == 'Helheim':
        file = os.path.join(
            os.getenv("DATA_HOME"),
            "Velocity/Rosenau/Helheim/GRL_003_all.EPSG3413.vel_md.nc")
    elif glacier == 'Kanger':
        file = os.path.join(
            os.getenv("DATA_HOME"),
            "Velocity/Rosenau/Kanger/GRL_004_all.EPSG3413.vel_md.nc")

    data = netCDF4.Dataset(file)
    x = data.variables['x'][:]
    y = data.variables['y'][:]

    # Select data to load
    i1 = np.argmin(abs(np.min(xpt) - x - 1e3))
    i2 = np.argmin(abs(np.max(xpt) - x + 1e3))
    j1 = np.argmin(abs(np.min(ypt) - y - 1e3))
    j2 = np.argmin(abs(np.max(ypt) - y + 1e3))

    x = x[i1:i2]
    y = y[j1:j2]
    vx = data.variables['vx'][:, j1:j2, i1:i2]
    vy = data.variables['vy'][:, j1:j2, i1:i2]
    v = np.sqrt(vx**2 + vy**2)
    time = datelib.date_to_fracyear(1970, 1, 1) + data.variables['time'][:] / 365.25

    try:
        n = len(xpt)
    except:
        n = 1
    nt = len(time)

    tpt = time
    vxpt = np.zeros([nt, n])
    vypt = np.zeros([nt, n])
    vpt = np.zeros([nt, n])
    for k in range(0, n):
        i = np.argmin(abs(xpt[k] - x))
        j = np.argmin(abs(ypt[k] - y))
        vxpt[:, k] = vx[:, j, i] * 365.25
        vypt[:, k] = vy[:, j, i] * 365.25
        vpt[:, k] = v[:, j, i] * 365.25

    # Mask out zero (no data) velocities
    vxpt[vpt == 0] = float('nan')
    vypt[vpt == 0] = float('nan')
    vpt[vpt == 0] = float('nan')

    if xy_velocities == 'True':
        return vpt, tpt, vxpt, vypt
    else:
        return vpt, tpt
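# Example usage of rosenau_landsat_at_pts (illustrative sketch; the point below is
# arbitrary and assumes DATA_HOME contains the Rosenau NetCDF velocity mosaics
# referenced above):
#
#   vpt, tpt, vxpt, vypt = rosenau_landsat_at_pts(np.array([309000.]),
#                                                 np.array([-2577000.]),
#                                                 'Helheim', xy_velocities='True')
#   # vpt has shape [len(tpt), npts]; zero-velocity (no data) cells are nan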
def calving(glacier):
    '''
    behavior = calving(glacier)

    Load files that state the type of "calving" for each satellite image.

    Inputs:
    glacier: glacier name

    Outputs:
    behavior: a column array of time, calvingstyle, satellite, file date
    '''

    DIR = os.path.join(os.getenv("DATA_HOME"), "CalvingStyle/" + glacier + "/")

    value = []
    type = []
    file = []
    time = []

    fid = open(DIR + "Combined.dat")
    lines = fid.readlines()
    for line in lines:
        if not (line.startswith('#')):
            p = line.split()
            value.append(p[2])
            type.append(p[1])
            file.append(p[0])
            if p[1] == 'Landsat' or p[1] == 'Worldview':
                time.append(
                    datelib.date_to_fracyear(float(p[0][0:4]), float(p[0][5:7]),
                                             float(p[0][8:10])))
            elif p[1] == 'TSX':
                time.append(
                    datelib.doy_to_fracyear(float(p[0][0:4]), float(p[0][5:8])))
            else:
                print "Not working, check ", p[0]

    # Make them arrays
    time = np.array(time)
    type = np.array(type)
    value = np.array(value)
    file = np.array(file)

    # Put it into a column array
    behavior = np.column_stack([time, value, type, file])

    return behavior
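# Example usage of calving (illustrative sketch; assumes DATA_HOME contains the
# CalvingStyle/<glacier>/Combined.dat file parsed above):
#
#   behavior = calving('Helheim')
#   times = behavior[:, 0].astype(float)   # fractional years
#   styles = behavior[:, 1]                # calving style recorded for each image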
def racmo_grid(xmin,xmax,ymin,ymax,variable,epsg=3413,maskvalues='ice',time1=-np.inf,time2=np.inf,resolution=11,fillin=True): ''' Pull all values for RACMO smb, t2m, zs, or runoff values for the region defined by xmin,xmax,ymin,ymax. xrac_subset,yrac_subset,var_subset,time = racmo_grid(xmin,xmax,ymin,ymax, variable,epsg=3413,mask='ice') Inputs: xmin,xmax,ymin,ymax : region where you want racmo data variable : what variable you want (runoff, t2m, zs, smb) maskvalues : if you want only 'ice' or 'notice' or 'both' values Outputs: xrac_subet,yrac_subset : output x,y var_subset: value of chosen variable at these points time : time ''' if (resolution == 1): if not('DATA2_HOME' in os.environ): sys.exit('No access to Data2 on this machine for downscaled RACMO. \n'+\ 'Try using resolution=11, but remember we only have it for 2001-2016.') dir = os.path.join(os.getenv("DATA2_HOME"),"Climate/RACMODownscaled/Downscaled_2000_2016/") if not(os.path.isdir(dir)): sys.exit('No access to downscaled RACMO data on this machine. \n'+\ 'Try using resolution=11, but remember we only have it for 2001-2016.') vardir = dir+variable+'/' mask = netCDF4.Dataset(dir+'Icemask_Topo_Iceclasses_lon_lat_average_1km.nc') files = os.listdir(vardir) for file in files: if file.startswith(variable): rec = netCDF4.Dataset(vardir+file) day1 = rec['time'].units[10:21] year,day1 = datelib.date_to_doy(int(day1[0:5]),int(day1[6:8]),int(day1[9:11])) days = np.array(rec['time'][:],dtype=np.float64) rectime = np.zeros_like(days) for i in range(0,len(days)): rectime[i] = datelib.doy_to_fracyear(year,day1+days[i]) indt = np.where((rectime >= time1) & (rectime <= time2))[0] if len(indt) > 0: print file try: indx except: xrec = np.array(rec['x'][:],dtype=np.float64) yrec = np.array(rec['y'][:],dtype=np.float64) indx = np.where((xrec >= xmin) & (xrec <= xmax))[0] indy = np.where((yrec >= ymin) & (yrec <= ymax))[0] xrac_subset = xrec[indx] yrac_subset = yrec[indy] mask_sub = mask['Icemask'][indy,indx] if variable == 'smb': varrec = rec['SMB_rec'][indt,indy,indx] elif variable == 'runoff': varrec = rec['runoffcorr'][indt,indy,indx] else: varrec = rec[variable][indt,indy,indx] try: var_subset except: var_subset = varrec time = rectime[indt] else: var_subset = np.row_stack([var_subset,varrec]) time = np.r_[time,rectime[indt]] if maskvalues == 'ice': ind = np.where(mask_sub <= 0) var_subset[:,ind[0],ind[1]] = np.float('nan') if fillin == True: ind_ice = np.where(mask_sub > 0) xrac_grid,yrac_grid = np.meshgrid(xrac_subset,yrac_subset) for j in range(0,len(time)): var_subset[j,ind[0],ind[1]] = scipy.interpolate.griddata((yrac_grid[ind_ice],xrac_grid[ind_ice]),\ var_subset[j,ind_ice[0],ind_ice[1]],(yrac_subset[ind[0]],xrac_subset[ind[1]]),method='nearest') # Sort by time ind = np.argsort(time) time = time[ind] var_subset = var_subset[ind,:,:] elif (resolution == 11): # RACMO data if variable != 'zs': files = [(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2001_2010.nc")), \ (os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2011_2014.nc")), \ (os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2015.nc"))] else: files = [(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/ZS_ZGRN_V5_1960-2014_detrended_2day.nc"))] rec1 = netCDF4.Dataset(files[0]) if variable != 'zs': rec2 = netCDF4.Dataset(files[1]) rec3 = netCDF4.Dataset(files[2]) mask = 
netCDF4.Dataset(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO23_masks_ZGRN11.nc")).variables['icemask'][:] # Load RACMO data lat = np.array(rec1.variables['lat'][:]) lon = np.array(rec1.variables['lon'][:]) var1 = np.array(rec1.variables[variable][:]) daysfrom1950_1 = np.array(rec1.variables['time'][:]) if variable != 'zs': var2 = np.array(rec2.variables[variable][:]) daysfrom1950_2 = np.array(rec2.variables['time'][:]) var3 = np.array(rec3.variables[variable][:]) if variable != 't2m': var3 = np.array(var3)/(60*60*24.0) days2015 = np.array(rec3.variables['time'][:]) # Convert date to fractional year startday1950 = jdcal.gcal2jd(1950,1,1) Nt1 = len(daysfrom1950_1) if variable != 'zs': Nt2 = len(daysfrom1950_2) Nt3 = len(days2015) time = np.zeros(Nt1+Nt2+Nt3) for i in range(0,Nt1): year,month,day,fracday = jdcal.jd2gcal(startday1950[0],startday1950[1]+daysfrom1950_1[i]) time[i] = datelib.date_to_fracyear(year,month,day) for i in range(0,Nt2): year,month,day,fracday = jdcal.jd2gcal(startday1950[0],startday1950[1]+daysfrom1950_2[i]) time[i+Nt1] = datelib.date_to_fracyear(year,month,day) for i in range(0,Nt3): time[i+Nt1+Nt2] = datelib.doy_to_fracyear(2015,1+days2015[i]) else: time = daysfrom1950_1 time = time[0:-71] var1 = var1[0:-71,:,:] # Convert lat,lon to epsg 3413 xrac,yrac = coordlib.convert(lon,lat,4326,epsg) # Find x,y indices that fall within the desired grid and check to make sure that the chosen # indices fall on the ice mask (mask == 1) if maskvalues == 'ice': xind = np.where((xrac >= xmin) & (xrac <= xmax) & (mask == 1)) elif maskvalues == 'notice': xind = np.where((xrac >= xmin) & (xrac <= xmax) & (mask == 0)) elif maskvalues == 'both': xind = np.where((xrac >= xmin) & (xrac <= xmax)) else: sys.exit("Unknown maskvalues") xrac_subset = xrac[xind] yrac_subset = yrac[xind] if variable != 'zs': var1_subset = var1[:,:,xind[0],xind[1]] var2_subset = var2[:,:,xind[0],xind[1]] var3_subset = var3[:,xind[0],xind[1]] else: var1_subset = var1[:,xind[0],xind[1]] mask_subset = mask[xind[0],xind[1]] if maskvalues == 'ice': yind = np.where((yrac_subset >= ymin) & (yrac_subset <= ymax) & (mask_subset == 1)) elif maskvalues == 'notice': yind = np.where((yrac_subset >= ymin) & (yrac_subset <= ymax) & (mask_subset == 0)) elif maskvalues == 'both': yind = np.where((yrac_subset >= ymin) & (yrac_subset <= ymax)) xrac_subset = xrac_subset[yind] yrac_subset = yrac_subset[yind] if variable != 'zs': var1_subset = var1_subset[:,:,yind] var2_subset = var2_subset[:,:,yind] var3_subset = var3_subset[:,yind] var_subset = np.row_stack([var1_subset[:,0,0,:],var2_subset[:,0,0,:],var3_subset[:,0,:]]) else: var1_subset = var1_subset[:,yind] var_subset = var1_subset[:,0,:] if variable == 't2m': # Convert Kelvin to Celsius var_subset=var_subset elif variable == 'smb' or variable == 'precip' or variable == 'runoff': # If variable is smb, convert kg m-2 s-1 to kg m-2 d-1 var_subset=var_subset*(60*60*24.0) return xrac_subset,yrac_subset,var_subset,time
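# Example usage of racmo_grid (illustrative sketch; the bounding box below is an
# arbitrary EPSG:3413 region near Helheim and assumes the RACMO files above exist):
#
#   xrac, yrac, runoff, time = racmo_grid(280.0e3, 320.0e3, -2590.0e3, -2550.0e3,
#                                         'runoff', maskvalues='ice', resolution=11)
#   # for resolution=11, runoff has shape [len(time), npts] in kg m-2 d-1 after the
#   # unit conversion applied above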
FORMAT='PDF', dpi=600) plt.close() ####################### # Helheim ungrounding # ####################### for i in range(0, 2): if i == 0: ximage, yimage, image = geotifflib.readrgb( os.path.join( os.getenv("DATA_HOME"), "Mosaics/Helheim/mosaicHelheim.2013-128.148.32713_1-20mgeo.tif" )) ind = np.argmin(abs(timewv_H - datelib.date_to_fracyear(2013, 5, 8))) else: ind = np.argmin(abs(timewv_H - datelib.date_to_fracyear(2014, 1, 27))) ximage, yimage, image = geotifflib.readrgb( os.path.join( os.getenv("DATA_HOME"), "Mosaics/Helheim/mosaicHelheim.2014-027.148.36721_1-20mgeo.tif" )) data = zabovefloat_H[:, ind] fig = plt.figure(figsize=(2.5, 2.5)) matplotlib.rc('font', family='Arial') ax = plt.gca() ax.imshow(image[:, :, 0], extent=[ np.min(ximage),
for i in range(0, len(DIRs)): dirs = os.listdir(DIRs[i]) for dir in dirs: area = (shapely.geometry.Polygon( np.loadtxt(DIRs[i] + dir + '/inputs/mesh_extent.dat'))).area try: fid = open(DIRs[i] + dir + '/mesh2d/inversion_adjoint/summary.dat', 'r') lines = fid.readlines() for line in lines: p = line.split() if p[0] == regpars[i]: dates[i].append(dir[3:11]) times[i].append( datelib.date_to_fracyear(int(dir[3:7]), int(dir[7:9]), int(dir[9:11]))) costs[i].append(float(p[3])) fid.close() except: print "failed " + dir fig = plt.figure(figsize=(6.5, 1.5)) matplotlib.rc('font', family='Arial') ax1 = plt.gca() plt.plot(SSA_CT_time, SSA_CT_cost, 'ks', markersize=4, markerfacecolor='orange', label='SSA-CT') plt.plot(SSA_MT_time,
if (ebase[i] > ebase_lims[0]) and (ebase[i] < ebase_lims[1]): cand_ebase.append(i) if (abs(hoa[i]) > hoa_lims[0]) and (abs(hoa[i]) < hoa_lims[1]): cand_hoa.append(i) # Find intersection of candidate indices cand_ind = list(set(cand_orbit).intersection(cand_ebase,cand_hoa)) # Get potential image pairs hoa_cand = hoa[cand_ind] ebase_cand = ebase[cand_ind] relorbit_cand = relorbit[cand_ind] absorbit_cand = absorbit[cand_ind] date_cand = date[cand_ind,:] scenenum_cand = scenenum[cand_ind] fracyear_cand = datelib.date_to_fracyear(date[cand_ind,0],date[cand_ind,1],date[cand_ind,2]) location_cand = np.array(location)[cand_ind] order_cand = order[cand_ind] print len(cand_ind) ################## # Worldview DEMs # ################## stereopairs = shapefile.Reader(os.path.join(os.getenv("DATA_HOME"),"Elevation/Worldview/Orders/20150721_JHK_order/stereo_dg_imagery_index_CC75_N_Gr_front_vm300m")) monopairs = shapefile.Reader(os.path.join(os.getenv("DATA_HOME"),"Elevation/Worldview/Orders/20150721_JHK_order/validpairs")) if glacier == 'Jak': pt = [-185662.0,-2272488.0] elif glacier == 'Helheim': pt = [305905.0,-2576918.0]
bedsource='cresis') x_K, y_K, zb_K, dists_K = glaclib.load_flowline( 'Kanger', shapefilename='flowline_flightline', filt_len=2.0e3, bedsource='cresis') # Get inversion dates times_inv_K = [] times_inv_H = [] DIR = os.path.join(os.getenv("MODEL_HOME"), "Helheim/Results/INV_SSA_ModelT") files = os.listdir(DIR) for file in files: if file.endswith('taub.tif'): times_inv_H.append( datelib.date_to_fracyear(int(file[3:7]), int(file[7:9]), int(file[9:11]))) DIR = os.path.join(os.getenv("MODEL_HOME"), "Kanger/Results/INV_SSA_ModelT") files = os.listdir(DIR) for file in files: if file.endswith('taub.tif'): times_inv_K.append( datelib.date_to_fracyear(int(file[3:7]), int(file[7:9]), int(file[9:11]))) # Ice-front positions terminus_val_H, terminus_time_H = icefrontlib.distance_along_flowline( x_H, y_H, dists_H, 'Helheim', type='icefront', time1=2000., time2=2016.5) terminus_val_K, terminus_time_K = icefrontlib.distance_along_flowline( x_K, y_K, dists_K, 'Kanger', type='icefront', time1=2000., time2=2016.5) # Locations where we want velocities and surface elevations
DIRREGPAR = dir + "/" try: DIRREGPAR except NameError: print "Couldn't find a result with regularization parameter " + regpar + " in directory " + meshname sys.exit() # Boundaries bbed = 4 bsur = 5 # Get date for loading satellite image fid = open(DIRM + "mesh_info.txt", "r") lines = fid.readlines() date = lines[1][7:-1] time = datelib.date_to_fracyear(int(date[0:4]), int(date[4:6]), int(date[6:])) fid.close() del fid, lines # Mesh boundaries extent = np.loadtxt(DIRM + "inputs/mesh_extent.dat") try: hole1 = np.loadtxt(DIRM + "inputs/mesh_hole1.dat") hole2 = np.loadtxt(DIRM + "inputs/mesh_hole2.dat") holes = [hole1, hole2] except: holes = [] # Load data for first regularization parameter bed_3D = elmerreadlib.saveline_boundary(DIRR + DIRREGPAR, method + "_beta.dat", bbed, ['velocity', 'beta'])
def distance_along_flowline(x, y, dists, glacier, type='icefront', imagesource=False, time1=-np.inf, time2=np.inf, datatypes='all'): ''' terminus_val,terminus_time=distance_along_flowline(x,y,dists,glacier,type='icefront',datatypes='all') Find terminus position (or rift position) along a flowline. Inputs: x,y,dists: x and y coordinates for flowline, and distance along the flowline glacier: glacier name type: icefront or rift positions Outputs: terminus_val: distance of terminus along flowline terminus_time: time for each terminus_val ''' if type is 'icefront': DIRI = os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/IceFronts/" + glacier + "/") elif type is 'rift': DIRI = os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/Rifts/" + glacier + "/") if datatypes == 'all': datatypes = ['TSX', 'Landsat7', 'Landsat8', 'WV', 'ASTER'] files = os.listdir(DIRI) terminus_val = [] terminus_time = [] sensorname = [] lineflow = LineString(np.column_stack([x, y])) n = 0 for file in files: intersection = [] if file.endswith('.shp') and (not "moon" in file): sf = shapefile.Reader(DIRI + file) shapes = sf.shapes() for shape in shapes: termpts = np.array(shape.points[:]) # Only look for intersection if there are points in the shape if len(termpts) > 0: lineterm = LineString(termpts) # Find intersection intersect = (lineflow.intersection(lineterm)) if not (intersect.is_empty): if len(intersection) > 0: print "More than one position along the flowline where the ice front intersects." print file else: intersection = np.array([intersect.x, intersect.y]) if len(intersection) > 0: flowind = (abs(x - intersection[0])).argmin() if x[flowind] > intersection[ 0]: #make sure that we have the smaller value flowind = flowind - 1 # Time of that terminus position if (("TSX" in file) or ("moon" in file)) and ('TSX' in datatypes): terminus_val.append(dists[flowind] + ( (intersection[0] - x[flowind])**2 + (intersection[1] - y[flowind])**2)**(0.5)) terminus_time.append( datelib.doy_to_fracyear(float(file[0:4]), float(file[5:8]))) sensorname.append('TSX') elif ("ASTER" in file) and ('ASTER' in datatypes): terminus_val.append(dists[flowind] + ( (intersection[0] - x[flowind])**2 + (intersection[1] - y[flowind])**2)**(0.5)) terminus_time.append( datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('ASTER') elif ("Landsat7" in file) and ('Landsat7' in datatypes): terminus_val.append(dists[flowind] + ( (intersection[0] - x[flowind])**2 + (intersection[1] - y[flowind])**2)**(0.5)) terminus_time.append( datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('Landsat7') elif ("Landsat" in file) and ('Landsat7' not in file) and ('Landsat8' in datatypes): terminus_val.append(dists[flowind] + ( (intersection[0] - x[flowind])**2 + (intersection[1] - y[flowind])**2)**(0.5)) terminus_time.append( datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('Landsat8') elif ("WV" in file) and ('WV' in datatypes): terminus_val.append(dists[flowind] + ( (intersection[0] - x[flowind])**2 + (intersection[1] - y[flowind])**2)**(0.5)) terminus_time.append( datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('WV') #else: # sys.exit("Don't know that date format for "+file) terminus_time = np.array(terminus_time) terminus_val = np.array(terminus_val) terminus_source = np.array(sensorname) # Need to double check that we imported the same number of times and # terminus values. 
    # If we didn't, something is horribly wrong.
    if len(terminus_time) == len(terminus_val):
        sortind = np.argsort(terminus_time, 0)
        terminus_time = terminus_time[sortind]
        terminus_val = terminus_val[sortind]
        terminus_source = terminus_source[sortind]
    else:
        sys.exit("Length of terminus values and times are different. Something is very wrong")

    # Subset record to cover only a certain time period if time1,time2 are set.
    ind = np.where((terminus_time > time1) & (terminus_time < time2))[0]
    terminus_time = terminus_time[ind]
    terminus_val = terminus_val[ind]
    terminus_source = terminus_source[ind]

    # Sometimes we will want to know the image source for each digitized ice front position.
    if imagesource == False:
        return terminus_val, terminus_time
    else:
        return terminus_val, terminus_time, terminus_source
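# Example usage of distance_along_flowline (illustrative sketch; x, y, dists would
# normally come from glaclib.load_flowline):
#
#   terminus_val, terminus_time = distance_along_flowline(x, y, dists, 'Helheim',
#                                                         type='icefront',
#                                                         time1=2008., time2=2016.)
#   # terminus_val -> terminus distance along the flowline (m) for each pick,
#   # terminus_time -> fractional year of each pick, sorted by time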
def load_all(time1, time2, glacier, type='icefront', datatypes='all'): ''' termx,termy,termt = load_all(time1,time2,glacier,type='icefront') Load all terminus positions for the chosen glacier. Inputs: time1,time2: load terminus positions from time1 to time2 glacier: glacier name type: icefront or rift Outputs: termx,termy: array of x,y coordinates of terminus positions for times in termt ''' if type is 'icefront': DIRI = os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/IceFronts/" + glacier + "/") elif type is 'rift': DIRI = os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/Rifts/" + glacier + "/") if datatypes == 'all': datatypes = ['TSX', 'Landsat7', 'Landsat8', 'WV', 'ASTER'] files = os.listdir(DIRI) shapefiles = [] termt = [] for file in files: if file.endswith('.shp') and (not "moon" in file): # Time of that terminus position try: del time except: pass if ('TSX' in file) and ('TSX' in datatypes): time = datelib.doy_to_fracyear(float(file[0:4]), float(file[5:8])) elif ("ASTER" in file) and ('ASTER' in datatypes): time = datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10])) elif ("Landsat7" in file) and ('Landsat7' in datatypes): time = datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10])) elif ("Landsat" in file) and ("Landsat7" not in file) and ('Landsat8' in datatypes): time = datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10])) elif ("WV" in file) and ('WV' in datatypes): time = datelib.date_to_fracyear(float(file[0:4]), float(file[5:7]), float(file[8:10])) try: if (time > time1) and (time < time2): termt.append(time) shapefiles.append(file) except: pass n = len(shapefiles) termx = np.zeros([300, n]) termy = np.zeros([300, n]) termx[:, :] = 'NaN' termy[:, :] = 'NaN' numpoints = 0 for i in range(0, n): numpoints = 0 file = shapefiles[i] # Load shapefile sf = shapefile.Reader(DIRI + file) shapes = sf.shapes() for shape in shapes: try: termpts = np.array(shape.points[:]) if len(termpts[:, 0]) > 0: termx[numpoints:numpoints + len(termpts[:, 0]), i] = termpts[:, 0] termy[numpoints:numpoints + len(termpts[:, 0]), i] = termpts[:, 1] numpoints = numpoints + len(termpts[:, 0]) except: pass # Sort by time sortind = np.argsort(termt) termt = [termt[i] for i in sortind] termx = termx[:, sortind] termy = termy[:, sortind] return termx, termy, termt
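# Example usage of load_all (illustrative sketch; assumes the same
# ShapeFiles/IceFronts directory structure as above):
#
#   termx, termy, termt = load_all(2008., 2016., 'Kanger', type='icefront')
#   # termx, termy are [300, n] arrays of terminus coordinates (padded with nan),
#   # one column per digitized front, sorted by the times in termt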
def box_method(glacier, imagesource=False, time1=-np.inf, time2=np.inf): DIRI = os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/IceFronts/" + glacier + "/") files = os.listdir(DIRI) # Load shapefile for box sf_box = shapefile.Reader( os.path.join(os.getenv("DATA_HOME"), "ShapeFiles/FluxGates/" + glacier + "/box_method.shp")) shapes_box = sf_box.shapes() # Lines that define back of box pts_north = np.array(shapes_box[0].points) line_north = LineString(pts_north) pts_south = np.array(shapes_box[1].points) line_south = LineString(pts_south) pts_west = np.array(shapes_box[2].points) # Box width width = np.sqrt((pts_west[0][0] - pts_west[1][0])**2 + (pts_west[0][1] - pts_west[1][1])**2) # Set up variables for loading terminus_val = [] terminus_time = [] sensorname = [] n = 0 for file in files: intersect_north = [] intersect_south = [] if file.endswith('.shp') and (not "moon" in file) and (not "Landsat7" in file): sf = shapefile.Reader(DIRI + file) shapes = sf.shapes() if len(shapes) > 1: print "check ", file else: pts_terminus = np.array(shapes[0].points[:]) if pts_terminus[0, 1] > pts_terminus[-1, 1]: pts_terminus = np.flipud(pts_terminus) # Only look for intersection if there are points in the shape if len(pts_terminus) > 0: line_terminus = LineString(pts_terminus) # Find intersection with sides of box intersect_north = (line_north.intersection(line_terminus)) intersect_south = (line_south.intersection(line_terminus)) if intersect_north.is_empty or intersect_south.is_empty: print "Ice front doesn't extend across entire domain ", file else: ind_in_box = np.where( (pts_terminus[:, 1] < intersect_north.y) & (pts_terminus[:, 1] > intersect_south.y))[0] pts_box = np.row_stack([ np.array(pts_west), np.r_[intersect_south.xy], pts_terminus[ind_in_box, :], np.r_[intersect_north.xy] ]) box = Polygon(pts_box) A = box.area # Terminus position terminus_val.append(A / width) # Time of that terminus position if ("TSX" in file) or ("moon" in file): terminus_time.append( datelib.doy_to_fracyear( float(file[0:4]), float(file[5:8]))) sensorname.append('TSX') elif ("ASTER" in file): terminus_time.append( datelib.date_to_fracyear( float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('ASTER') elif ("Landsat" in file): terminus_time.append( datelib.date_to_fracyear( float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('Landsat8') elif ("WV" in file): terminus_time.append( datelib.date_to_fracyear( float(file[0:4]), float(file[5:7]), float(file[8:10]))) sensorname.append('WV') else: sys.exit("Don't know that date format for " + file) terminus_time = np.array(terminus_time) terminus_val = np.array(terminus_val) terminus_source = np.array(sensorname) # Need to double check that we imported the same number of times and # terminus values. If we didn't something is horribly wrong. if len(terminus_time) == len(terminus_val): sortind = np.argsort(terminus_time, 0) terminus_time = terminus_time[sortind] terminus_val = terminus_val[sortind] terminus_source = terminus_source[sortind] else: sys.exit( "Length of terminus values and times are different. Something is very wrong" ) # Subset record to cover only a certain time period if time1,time2 are set. ind = np.where((terminus_time > time1) & (terminus_time < time2))[0] terminus_time = terminus_time[ind] terminus_val = terminus_val[ind] terminus_source = terminus_source[ind] # Sometimes we will want to know the image source for each digitized ice front position. 
    if imagesource == False:
        return terminus_val, terminus_time
    else:
        return terminus_val, terminus_time, terminus_source
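# Example usage of box_method (illustrative sketch). The box method reports the mean
# terminus position as the box area divided by the box width, so it is less sensitive
# to where a single flowline happens to cross the front:
#
#   terminus_val, terminus_time = box_method('Helheim', time1=2008., time2=2016.)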
def racmo_interpolate_to_cartesiangrid(x,y,variable,epsg=3413,maskvalues='ice',timing='mean',time1=-np.Inf,time2=np.Inf): ''' Pull all values for RACMO smb, t2m, zs, or runoff values for the region defined by arrays x,y. var,time = racmo_grid(xmin,xmax,ymin,ymax, variable,epsg=3413,mask='ice') Inputs: x,y : grid to interpolate RACMO values onto variable : what variable you want (runoff, t2m, zs, smb) maskvalues : if you want only 'ice' or 'notice' or 'both' values Outputs: var: value of chosen variable at these points time : time ''' # RACMO data if variable != 'zs': files = [(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2001_2010.nc")), \ (os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2011_2014.nc")), \ (os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO2.3_GRN11_"+variable+"_daily_2015.nc"))] else: files = [(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/ZS_ZGRN_V5_1960-2014_detrended_2day.nc"))] rec1 = netCDF4.Dataset(files[0]) if variable != 'zs': rec2 = netCDF4.Dataset(files[1]) rec3 = netCDF4.Dataset(files[2]) mask = netCDF4.Dataset(os.path.join(os.getenv("DATA_HOME"),"Climate/RACMO/2015_09_Laura_Kehrl/RACMO23_masks_ZGRN11.nc")).variables['icemask'][:] # Load RACMO data lat = np.array(rec1.variables['lat'][:]) lon = np.array(rec1.variables['lon'][:]) var1 = np.array(rec1.variables[variable][:]) daysfrom1950_1 = np.array(rec1.variables['time'][:]) # Load extra data if not variable zs if variable != 'zs': var2 = np.array(rec2.variables[variable][:]) daysfrom1950_2 = np.array(rec2.variables['time'][:]) var3 = np.array(rec3.variables[variable][:]) # Fix var3 units, which are different from var2 and var1 if variable != 't2m': var3 = np.array(var3)/(60*60*24.0) days2015 = np.array(rec3.variables['time'][:]) # Convert date to fractional year startday1950 = jdcal.gcal2jd(1950,1,1) Nt1 = len(daysfrom1950_1) if variable != 'zs': Nt2 = len(daysfrom1950_2) Nt3 = len(days2015) time = np.zeros(Nt1+Nt2+Nt3) for i in range(0,Nt1): year,month,day,fracday = jdcal.jd2gcal(startday1950[0],startday1950[1]+daysfrom1950_1[i]) time[i] = datelib.date_to_fracyear(year,month,day) for i in range(0,Nt2): year,month,day,fracday = jdcal.jd2gcal(startday1950[0],startday1950[1]+daysfrom1950_2[i]) time[i+Nt1] = datelib.date_to_fracyear(year,month,day) for i in range(0,Nt3): time[i+Nt1+Nt2] = datelib.doy_to_fracyear(2015,1+days2015[i]) else: time = daysfrom1950_1 time = time[0:-71] var1 = var1[0:-71,:,:] # Combine variables into same file var = np.row_stack([var1[:,0,:,:],var2[:,0,:,:],var3]) # Convert lat,lon to epsg 3413 xrac,yrac = coordlib.convert(lon,lat,4326,epsg) # Get dimensions of output grid nx = len(x) ny = len(y) if maskvalues == 'ice': ind = np.where((mask == 1)) elif maskvalues == 'notice': ind = np.where((mask == 0)) elif maskvalues == 'both': ind = np.where((mask == 1) | (mask == 0)) xracflat = xrac[ind] yracflat = yrac[ind] # Make a gridded data set from the model output if timing == 'mean': # Make a KD-tree so we can do range searches fast tree = cKDTree(np.column_stack([xracflat,yracflat])) vargrid = np.zeros([len(y),len(x)]) varflat = np.mean(var,axis=0)[ind] timesub = np.mean(time) # For each point in the grid, for i in range(ny): for j in range(nx): L = tree.query_ball_point( (x[j], y[i]), 10.e3 ) # Initialize the weights to 0 weights = 0.0 # For all the nearby model points, for l in L: xp = xracflat[l] yp = yracflat[l] # find the distance to the 
                    # current point and the appropriate weight
                    r = np.sqrt((x[j] - xp)**2 + (y[i] - yp)**2)
                    w = (10.e3/r)**3
                    weights += w
                    vargrid[i, j] += w * varflat[l]
                vargrid[i, j] /= weights
    else:
        xgrid, ygrid = np.meshgrid(x, y)
        ind2 = np.where((time >= time1) & (time <= time2))[0]
        timesub = time[ind2]
        vargrid = np.zeros([len(timesub), len(y), len(x)])
        varsub = var[ind2, :, :]
        for k in range(0, len(timesub)):
            varflat = varsub[k, :, :][ind]
            vargrid[k, :, :] = scipy.interpolate.griddata((yracflat, xracflat), varflat, \
                (ygrid.flatten(), xgrid.flatten())).reshape(len(y), len(x))

    if variable == 'smb' or variable == 'precip' or variable == 'runoff':
        # If variable is smb, precip, or runoff, convert kg m-2 s-1 to kg m-2 d-1
        vargrid = vargrid*(60*60*24.0)

    return timesub, vargrid
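# Example usage of racmo_interpolate_to_cartesiangrid (illustrative sketch; the 500-m
# grid below is arbitrary and assumes the RACMO files referenced above):
#
#   xgrid = np.arange(280.0e3, 320.0e3, 500.)
#   ygrid = np.arange(-2590.0e3, -2550.0e3, 500.)
#   time, smb = racmo_interpolate_to_cartesiangrid(xgrid, ygrid, 'smb',
#                                                  timing='mean', maskvalues='ice')
#   # with timing='mean', smb is a single [len(ygrid), len(xgrid)] field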
len(dates), ]) Hf_fast = np.zeros([ len(dates), ]) Hf_med = np.zeros([ len(dates), ]) times = np.zeros([ len(dates), ]) for i in range(0, len(dates)): for j in range(0, len(DIRs)): times[i] = datelib.date_to_fracyear(int(dates[i][0:4]), int(dates[i][4:6]), int(dates[i][6:])) if 'FS' in DIRs[j] and 'ConstantT' in DIRs[j]: # Get heights x, y, zs_grid = geotifflib.read(DIRs[j] + 'DEM' + dates[i] + '_surf_mea_zs.tif') x, y, zb_grid = geotifflib.read(DIRs[j] + 'DEM' + dates[i] + '_bed_mod_zb.tif') grid = zs_grid - zb_grid grid[ind_cutoff_grid_fast] = np.float('nan') H_fast[i] = np.nanmean(grid) grid = zs_grid - zb_grid grid[ind_cutoff_grid] = np.float('nan') zb_grid[ind_cutoff_grid] = np.float('nan') H_med[i] = np.nanmean(grid[vsurfini_cutoff_SSA < 4000])
# Correct regpar for FS and SSA regpars = ['1e12','1e12','1e13','1e13'] for i in range(0,len(DIRs)): dirs = os.listdir(DIRs[i]) for dir in dirs: area = (shapely.geometry.Polygon(np.loadtxt(DIRs[i]+dir+'/inputs/mesh_extent.dat'))).area if dir.endswith('T'): fid = open(DIRs[i]+dir+'/mesh2d/inversion_adjoint/summary.dat','r') lines = fid.readlines() for line in lines: p = line.split() if p[0] == regpars[i]: dates[i].append(dir[3:11]) times[i].append(datelib.date_to_fracyear(int(dir[3:7]),int(dir[7:9]),int(dir[9:11]))) costs[i].append(float(p[3])) rmses[i].append(np.sqrt(2*float(p[3])/area)) fid.close() fig = plt.figure(figsize=(6.5,1.5)) matplotlib.rc('font',family='Arial') ax1 = plt.gca() plt.plot(SSA_CT_time,SSA_CT_cost,'ks',markersize=4,markerfacecolor='orange',label='SSA-CT') plt.plot(SSA_MT_time,SSA_MT_cost,'ko',markersize=4,markerfacecolor='red',label='SSA-MT') plt.plot(FS_CT_time,FS_CT_cost,'ks',markersize=4,markerfacecolor='cyan',label='FS-CT') plt.plot(FS_MT_time,FS_MT_cost,'ko',markersize=4,markerfacecolor='blue',label='FS-MT') ax1.yaxis.set_major_formatter(matplotlib.ticker.FormatStrFormatter('%.1E')) ax1.tick_params(axis='both',labelsize=8) plt.ylabel(r'$J_o$',fontsize=8,fontname='Arial') ax1.set_xlim([2001,2016])
t2=int(t2)) # Get mesh extent files print "Getting mesh info..." mesh_extent_x = np.loadtxt(DIRM + 'inputs/mesh_timeseries_x.dat') mesh_extent_y = np.loadtxt(DIRM + 'inputs/mesh_timeseries_y.dat') mesh_hole1 = np.loadtxt(DIRM + 'inputs/mesh_hole1.dat') mesh_hole2 = np.loadtxt(DIRM + 'inputs/mesh_hole2.dat') # Get model time print "Getting model time..." fid = open(DIRM + 'mesh_info.txt', 'r') lines = fid.readlines() date1 = lines[1][7:-1] fid.close() model_time = datelib.date_to_fracyear(int(date1[0:4]), int( date1[4:6]), int(date1[6:8])) + ( (np.arange(0, np.shape(model_grid)[2]) - 1) / 365.25) print "Getting model results at sample points..." vel_model = np.zeros([len(model_time), len(dists_eul)]) zs_model = np.zeros([len(model_time), len(dists_eul)]) for i in range(0, len(dists_eul)): ind = np.argmin(abs(dists_eul[i] - dist)) indx = np.argmin(abs(xflow[ind] - xgrid)) indy = np.argmin(abs(yflow[ind] - ygrid)) for j in range(0, len(model_time)): vel_model[j, i] = model_grid['velocity'][indy, indx, j] zs_model[j, i] = model_grid['z'][indy, indx, j] print "Calculating dhdt over an 11-day window and average 11-day velocities"
beta_file_m1_ave = maindir+dir+'/mesh2d/steady_'+regpar+\ '_SSA_average_2000_2016_'+temperature_text+'_linear0001.pvtu' beta_file_m3_ave = maindir+dir+'/mesh2d/steady_'+regpar+\ '_SSA_average_2000_2016_'+temperature_text+'_weertman0001.pvtu' data_m1 = elmerreadlib.pvtu_file(beta_file_m1,['vsurfini',vname,'beta']) surf_m1 = elmerreadlib.values_in_layer(data_m1,'surf') data_m1_ave = elmerreadlib.pvtu_file(beta_file_m1_ave,['vsurfini',vname]) surf_m1_ave = elmerreadlib.values_in_layer(data_m1_ave,'surf') data_m3_ave = elmerreadlib.pvtu_file(beta_file_m3_ave,['vsurfini',vname]) surf_m3_ave = elmerreadlib.values_in_layer(data_m3_ave,'surf') # Get variables times.append(datelib.date_to_fracyear(int(beta_date[0:4]),int(beta_date[4:6]),int(beta_date[6:]))) velocities_obs.append(np.mean(surf_m1['vsurfini'][ind_cutoff])) taub.append(np.mean(surf_m1['taub'][ind_cutoff])) # Get driving stress and interpolate to mesh grid x_taud,y_taud,taud_grid = elmerreadlib.input_file(maindir+dir+'/inputs/taud.xy') x_zs,y_zs,zs_grid = elmerreadlib.input_file(maindir+dir+'/inputs/zsdem.xy') x_zb,y_zb,zb_grid = elmerreadlib.input_file(maindir+dir+'/inputs/zbdem.xy') f = scipy.interpolate.RegularGridInterpolator((y_taud,x_taud),taud_grid) taud.append(np.mean(f((surf_m1['y'][ind_cutoff],surf_m1['x'][ind_cutoff])))) f = scipy.interpolate.RegularGridInterpolator((y_taud,x_taud),zs_grid-zb_grid) H.append(np.mean(f((surf_m1['y'][ind_cutoff],surf_m1['x'][ind_cutoff])))) # Calculate misfits misfit_inversion_mape.append(np.mean(abs(surf_m1['vsurfini'][ind_cutoff]-\ surf_m1[vname][ind_cutoff])/surf_m1['vsurfini'][ind_cutoff])*100)
def load_satimages(glacier, xmin, xmax, ymin, ymax, time1=-np.inf, time2=np.inf, data='all'):
    '''
    images,times,types = load_satimages(glacier,xmin,xmax,ymin,ymax,time1=-np.inf,time2=np.inf,data='all')

    Load satellite images for a particular glacier for the grid defined by xmin,xmax,ymin,ymax,
    over the time interval time1 to time2. If time1 == time2, the code will find the sat image
    closest to the chosen time.
    '''

    DIRLANDSAT = os.path.join(os.getenv("DATA_HOME"), "Imagery/Landsat/" + glacier + "/TIF/")
    DIRWV = os.path.join(os.getenv("DATA_HOME"), "Imagery/Worldview/" + glacier + "/")
    DIRTSX = os.path.join(os.getenv("DATA_HOME"), "Mosaics/" + glacier + "/")

    # Find files to load
    dirs = []
    if time1 == time2:
        times = 0.0
    else:
        times = []
    types = []
    images = []

    # Check landsat files
    if ('LANDSAT' in data) or (data == 'all'):
        files = os.listdir(DIRLANDSAT)
        for file in files:
            if file.endswith('.tif'):
                filetime = datelib.date_to_fracyear(float(file[0:4]), float(file[4:6]), float(file[6:8]))
                if (time1 == time2):
                    # Only keep the image closest to the chosen time; this constraint also
                    # prevents unnecessary loading of files
                    if abs(filetime - time1) < abs(times - time1):
                        types = 'Landsat'
                        dirs = DIRLANDSAT + file
                        times = filetime
                elif (filetime >= time1) and (filetime <= time2):
                    types.append('Landsat')
                    dirs.append(DIRLANDSAT + file)
                    times.append(filetime)
                    images.append(geotifflib.read(DIRLANDSAT + file))

    # Check TSX files
    if ('TSX' in data) or (data == 'all'):
        files = os.listdir(DIRTSX)
        for file in files:
            if file.endswith('.tif'):
                if glacier == 'Helheim':
                    filetime = datelib.doy_to_fracyear(float(file[14:18]), float(file[19:22]))
                elif glacier == 'Kanger':
                    filetime = datelib.doy_to_fracyear(float(file[11:15]), float(file[16:19]))
                if (time1 == time2):
                    if (abs(filetime - time1) <= abs(times - time1)) and file.endswith('_1-20mgeo.tif'):
                        types = 'TSX'
                        dirs = DIRTSX + file
                        times = filetime
                elif (filetime >= time1) and (filetime <= time2):
                    types.append('TSX')
                    dirs.append(DIRTSX + file)
                    times.append(filetime)
                    images.append(geotifflib.read(DIRTSX + file))

    if time1 != time2:
        # Sort loaded images by time
        sortind = np.argsort(times)
        images_sorted = []
        types_sorted = []
        times_sorted = []
        for ind in sortind:
            images_sorted.append(images[ind])
            types_sorted.append(types[ind])
            times_sorted.append(times[ind])
    else:
        # Only load the single closest image
        images_sorted = geotifflib.read(dirs)
        times_sorted = times
        types_sorted = types

    return images_sorted, times_sorted, types_sorted
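# Example usage of load_satimages (illustrative sketch; the bounding box is an
# arbitrary EPSG:3413 region and assumes the Landsat/TSX directories above exist):
#
#   images, times, types = load_satimages('Helheim', 280.0e3, 320.0e3,
#                                         -2590.0e3, -2550.0e3,
#                                         time1=2014.0, time2=2015.0, data='TSX')
#   # with time1 == time2 the single closest image is returned instead of a list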
def howat_optical_at_pts(xpt, ypt, glacier, xy_velocities='False'): DIR = os.path.join(os.getenv("DATA_HOME"), "Velocity/Howat/" + glacier + "/") dirs = os.listdir(DIR) m = 0 for dir in dirs: if dir.startswith('OPT'): m = m + 1 try: n = len(xpt) except: n = 1 # Set up variables velocities = np.zeros([m, n]) velocities[:, :] = 'nan' velocities_x = np.zeros([m, n]) velocities_x[:, :] = 'nan' velocities_y = np.zeros([m, n]) velocities_y[:, :] = 'nan' error = np.zeros([m, n]) error[:, :] = 'nan' times = np.zeros([m, 2]) count = 0 for dir in dirs: if dir.startswith('OPT'): metafile = open(DIR + dir + '/' + dir + '.meta', "r") lines = metafile.readlines() metafile.close() jdates = [] jdates.append(float(lines[0][36:48])) if len(lines[0]) > 50: jdates.append(float(lines[0][49:61])) if len(lines[0]) > 63: jdates.append(float(lines[0][62:74])) if len(lines[0]) > 75: jdates.append(float(lines[0][75:87])) if len(lines[0]) > 88: jdates.append(float(lines[0][88:100])) if len(lines[0]) > 101: jdates.append(float(lines[0][101:113])) # Get date times_all = [] for jdate in jdates: year, month, day, fracday = jdcal.jd2gcal(jdate, 0) times_all.append( datelib.date_to_fracyear(year, month, day + fracday)) times[count, 0] = np.mean(times_all) times[count, 1] = np.max(times_all) - np.min(times_all) x, y, vx = geotifflib.read(DIR + dir + '/' + dir + '.vx.tif', no_data_value=-99999) x, y, vy = geotifflib.read(DIR + dir + '/' + dir + '.vy.tif', no_data_value=-99999) x, y, ex = geotifflib.read(DIR + dir + '/' + dir + '.ex.tif', no_data_value=-99999) x, y, ey = geotifflib.read(DIR + dir + '/' + dir + '.ey.tif', no_data_value=-99999) v = np.sqrt(vx**2 + vy**2) fv = scipy.interpolate.RegularGridInterpolator([y, x], v, method='linear', bounds_error=False) fex = scipy.interpolate.RegularGridInterpolator([y, x], ex, method='linear', bounds_error=False) fey = scipy.interpolate.RegularGridInterpolator([y, x], ey, method='linear', bounds_error=False) fvx = scipy.interpolate.RegularGridInterpolator([y, x], vx, method='linear', bounds_error=False) fvy = scipy.interpolate.RegularGridInterpolator([y, x], vy, method='linear', bounds_error=False) # Find velocities velocities[count, :] = fv(np.array([ypt, xpt]).T) velocities_x[count, :] = fvx(np.array([ypt, xpt]).T) velocities_y[count, :] = fvy(np.array([ypt, xpt]).T) error[count, :] = velocities[count, :] * np.sqrt( (fex(np.array([ypt, xpt]).T) / fvx(np.array([ypt, xpt]).T))**2 + (fey(np.array([ypt, xpt]).T) / fvy(np.array([ypt, xpt]).T))**2) count = count + 1 # Sort arrays by time sortind = np.argsort(times[:, 0], 0) tpt_sort = times[sortind] vpt_sort = velocities[sortind, :] ept_sort = error[sortind, :] vxpt_sort = velocities_x[sortind, :] vypt_sort = velocities_y[sortind, :] if xy_velocities == 'True': return vpt_sort, tpt_sort, ept_sort, vxpt_sort, vypt_sort else: return vpt_sort, tpt_sort, ept_sort
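# Example usage of howat_optical_at_pts (illustrative sketch; the point is arbitrary
# and assumes DATA_HOME contains the Velocity/Howat/<glacier>/OPT* directories read above):
#
#   vpt, tpt, ept = howat_optical_at_pts(np.array([309000.]), np.array([-2577000.]),
#                                        'Helheim')
#   # vpt -> speeds at the query points, tpt[:, 0] -> mean image-pair time,
#   # tpt[:, 1] -> time span of the pair, ept -> propagated velocity error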
# Make overview figure for Helheim/Kanger inversions.
#
# Laura Kehrl, 10 April 2018

import os
import numpy as np
import cubehelix, matplotlib
import masklib, inverselib, geotifflib, datelib, glaclib, vellib
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap

# Cutoff for velocities
velocity_cutoff = 1000  # m/yr

# Image for plotting
imagetime_HG = datelib.date_to_fracyear(2014, 7, 4)
ximage_HG, yimage_HG, image_HG = geotifflib.readrgb(os.path.join(os.getenv("DATA_HOME"),
    "Imagery/Landsat/Helheim/TIF/20140704140535_LC82330132014185LGN00.tif"))
imagetime_KG = datelib.date_to_fracyear(2014, 7, 6)
ximage_KG, yimage_KG, image_KG = geotifflib.readrgb(os.path.join(os.getenv("DATA_HOME"),
    "Imagery/Landsat/Kanger/TIF/20140706135251_LC82310122014187LGN00.tif"))

# Glacier extents for inversions
extent_HG = np.loadtxt('/Users/kehrl/Models/Helheim/3D/INV_SSA_ModelT/' +
                       'DEM20120316_modelT_Lcurve/inputs/mesh_extent.dat')
hole1_HG = np.loadtxt('/Users/kehrl/Models/Helheim/3D/INV_SSA_ModelT/' +
                      'DEM20120316_modelT_Lcurve/inputs/mesh_hole1.dat')
hole2_HG = np.loadtxt('/Users/kehrl/Models/Helheim/3D/INV_SSA_ModelT/' +
                      'DEM20120316_modelT_Lcurve/inputs/mesh_hole2.dat')
extent_KG = np.loadtxt('/Users/kehrl/Models/Kanger/3D/INV_SSA_ModelT/' +
"ShapeFiles/Glaciers/3D/" + glacier + "/") inputs = os.path.join(DIRM + "inputs/") # Make mesh directories if not (os.path.isdir(DIRM)): os.makedirs(DIRM) os.makedirs(DIRM + "/inputs") os.makedirs(DIRM + "/figures") # Densities for finding floating ice rho_i = 917.0 rho_sw = 1025.0 yearinsec = 365.25 * 24 * 60 * 60 # Time time1 = datelib.date_to_fracyear(int(date1[0:4]), int(date1[4:6]), int(date1[6:8])) ################# # Mesh Geometry # ################# # Mesh exterior if meshshp.endswith('_nofront') or meshshp.endswith('_nofront.shp'): if timeseries == True: if len(date2) < 8: sys.exit( "Need an end date (-d2) to calculate a timeseries of meshes.") time2 = datelib.date_to_fracyear(int(date2[0:4]), int(date2[4:6]), int(date2[6:8])) print "Calculating timeseries of meshes from " + date1 + " to " + date2 times,xextents,yextents,bounds,icefronts_x,icefronts_y,icefronts_advance = glaclib.load_extent_timeseries(glacier,\