Code Example #1
File: vellib.py Project: tarynblack/big3
def tsx_near_time(time, glacier, just_filename=False):
    '''
  
  x,y,vx,vy,v,time = tsx_near_time(time,glacier,just_filename = False)
  
  Find TSX data closest to "time".
  
  Inputs:
  time: time for which you want data
  glacier: glacier name (Kanger or Helheim)
  just_filename: option to only return the filename

  Outputs:
  x,y: grid coordinates
  vx,vy: x and y velocities
  v: velocity magnitudes for grid
  time: time of transect

  '''

    DIR_TSX = os.path.join(os.getenv("DATA_HOME"),
                           "Velocity/TSX/" + glacier + "/")

    DIRs = os.listdir(DIR_TSX)
    tpt = []

    best_track = []
    min_diff = 1.0
    for DIR in DIRs:
        if DIR.startswith('track'):
            tsx_time, interval = geodatlib.readtime(DIR_TSX + DIR +
                                                    "/mosaicOffsets")
            if abs(tsx_time - time) < min_diff:
                min_diff = abs(tsx_time - time)
                best_time = tsx_time
                best_track = DIR

    if just_filename:
        year, month, day = datelib.fracyear_to_date(best_time)

        return DIR_TSX + 'TIF/' + best_track + '_' + "%04d%02d%02d" % (
            year, month, day), best_time

    else:
        # Return the closest velocity profile
        x, y, v, vx, vy, ex, ey, time, interval = geodatlib.readvelocity(
            DIR_TSX, best_track, "/mosaicOffsets")

        return x, y, vx, vy, v, time
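
A minimal usage sketch (not part of the repository), assuming vellib and its dependencies are importable and the DATA_HOME environment variable points at the data archive; the time value 2012.5 is a hypothetical example:

import vellib

# Velocity grid from the TSX mosaic closest to mid-2012 for Helheim
x, y, vx, vy, v, t = vellib.tsx_near_time(2012.5, 'Helheim')

# Or only the filename of that mosaic and its time
filename, t = vellib.tsx_near_time(2012.5, 'Helheim', just_filename=True)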
Code Example #2
def readvelocity(DIR, track, file):
    '''
  x,y,v,vx,vy,ex,ey,time,interval = readvelocity(DIR,track,file)
  '''

    # Filename
    filename = DIR + track + "/" + file
    # Get time
    if os.path.exists(filename + ".meta"):
        time, interval = readtime(filename)
        year, month, day = datelib.fracyear_to_date(time)
        date = "%04d%02d%02d" % (year, month, day)
    else:
        time = float('NaN')
        interval = float('NaN')
        date = float('NaN')

    # Check to see if there are geotiff files in the TIF subdirectory. If not, read the binary data.
    if os.path.isfile(DIR + "TIF/" + track + "_v.tif"):
        x, y, v = geotifflib.read(DIR + "TIF/" + track + "_v.tif")
        x, y, vx = geotifflib.read(DIR + "TIF/" + track + "_vx.tif")
        x, y, vy = geotifflib.read(DIR + "TIF/" + track + "_vy.tif")
        x, y, ex = geotifflib.read(DIR + "TIF/" + track + "_ex.tif")
        x, y, ey = geotifflib.read(DIR + "TIF/" + track + "_ey.tif")
    elif os.path.isfile(DIR + "TIF/" + track + "_" + date + "_v.tif"):
        x, y, v = geotifflib.read(DIR + "TIF/" + track + "_" + date + "_v.tif")
        x, y, vx = geotifflib.read(DIR + "TIF/" + track + "_" + date +
                                   "_vx.tif")
        x, y, vy = geotifflib.read(DIR + "TIF/" + track + "_" + date +
                                   "_vy.tif")
        x, y, ex = geotifflib.read(DIR + "TIF/" + track + "_" + date +
                                   "_ex.tif")
        x, y, ey = geotifflib.read(DIR + "TIF/" + track + "_" + date +
                                   "_ey.tif")
    else:
        print "Unpacking binary velocity file ", track
        x, y, v, vx, vy, ex, ey, time, interval = readbinary(filename)

    return (x, y, v, vx, vy, ex, ey, time, interval)
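
A hedged sketch of calling readvelocity directly, assuming it lives in geodatlib (as the other examples import it) and that the track subdirectory and optional TIF/ folder exist; the directory and track name below are placeholders:

import geodatlib

# Placeholder directory and track name
x, y, v, vx, vy, ex, ey, t, interval = geodatlib.readvelocity(
    "/path/to/Velocity/TSX/Helheim/", "track-27794", "mosaicOffsets")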
Code Example #3
File: glaclib.py Project: tarynblack/big3
def load_extent_timeseries(glacier,time1,time2,dt,nofront_shapefile='glacier_extent_nofront',datatypes=['Landsat','TSX','WV']):

  '''
  time, xextents, yextents, bounds, timeseries_x, timeseries_y, timeseries_advance =
     load_extent_timeseries(glacier,time1,time2,dt,
     nofront_shapefile='glacier_extent_nofront',
     datatypes=['Landsat','TSX','WV'])
     
  Interpolates between picked ice-front positions to create a timeseries of
  glacier extents (meshes) for use in the terminus-driven model.
  
  Inputs:
  glacier           : glacier name (Helheim, Kanger)
  time1             : fractional start time for timeseries
  time2             : fractional end time for timeseries
  dt                : timestep
  nofront_shapefile : nofront shapefile name for mesh extent
  datatypes         : satellite image types for picked ice fronts
  
  Outputs:
  time     : interpolated time between time1,time2 with timestep dt
  xextents : 2-d array of x-coordinates of extents
  yextents : 2-d array of y-coordinates of extents
  bounds   : boundary numbers for extents
  timeseries_x, timeseries_y : 2-d arrays of interpolated ice-front coordinates for each timestep
  timeseries_advance : signed ice-front advance (negative for retreat) since the previous timestep
  '''

  # Glacier extent with no ice front
  extent = meshlib.shp_to_xy(os.path.join(os.getenv("DATA_HOME"),"ShapeFiles/Glaciers/3D/"+glacier+"/"+nofront_shapefile))
  if extent[1,1] > extent[0,1]:
    extent = np.flipud(extent)
  
  xextent = extent[:,0]
  yextent = extent[:,1]
  bound_extent = extent[:,2]

  # Interpolate glacier extent to a finer grid to make ice-front interpolation easier
  dextent = distlib.transect(xextent,yextent)
  #dextent = np.arange(0,dold[-1],20.0)
  #xextent = np.interp(dextent,dold,xextent)
  #yextent = np.interp(dextent,dold,yextent)
  #f = scipy.interpolate.interp1d(dold,bound,kind='nearest')
  #bound = f(dextent)
  extent = LineString(np.column_stack([extent[:,0:2]]))

  # Load all ice front positions for that time period
  termx,termy,termt = icefrontlib.load_all(time1-0.5,time2+0.5,glacier,type='icefront',datatypes=datatypes)

  # In case we have multiple ice front picks for the same day, we want to
  # use only one of those for continuity
  [junk,ind] = np.unique(termt,return_index=True)
  termt = np.array(termt)[ind]
  termx = termx[:,ind]
  termy = termy[:,ind]

  # Load a velocity profile to use as interpolation direction to figure out ice-front position
  # on timesteps that fall between picked ice fronts
  x,y,u = geotifflib.read(os.path.join(os.getenv("DATA_HOME"),"Velocity/TSX/"+glacier+"/TIF/all-2008-2016_vx.tif"))
  x,y,v = geotifflib.read(os.path.join(os.getenv("DATA_HOME"),"Velocity/TSX/"+glacier+"/TIF/all-2008-2016_vy.tif"))  

  fu = scipy.interpolate.RegularGridInterpolator((y,x),u)
  fv = scipy.interpolate.RegularGridInterpolator((y,x),v)

  # Get ice-front position for each timestep
  time = np.arange(time1,time2+dt,dt)
  timeseries_x = np.zeros([len(termx[:,0]),len(time)])
  timeseries_y = np.zeros([len(termx[:,0]),len(time)])
  timeseries_advance = np.zeros([len(termx[:,0]),len(time)])
  timeseries_dist = np.zeros([len(termx[:,0]),len(time)])  
  timeseries_x[:,:] = float('nan')
  timeseries_y[:,:] = float('nan')
  timeseries_advance[:,:] = float('nan')
  timeseries_dist[:,:] = float('nan')
  xextents = np.zeros([len(termx[:,0])+len(xextent),len(time)])
  yextents = np.zeros([len(termx[:,0])+len(xextent),len(time)])
  bounds = np.zeros([len(termx[:,0])+len(xextent),len(time)])
  for i in range(0,len(time)):
    # Find picked ice-front positions for before and after timestep for the interpolation
    ind = np.argmin(abs(time[i]-termt))
    if termt[ind] < time[i]:
      ind1 = ind
      ind2 = ind+1
    else:
      ind1 = ind-1
      ind2 = ind
    
    # Fractional time between ind1,ind2 to use for interpolation
    frac = (time[i]-termt[ind1])/(termt[ind2]-termt[ind1])
    
    # Get picked ice-front positions that we will use for the interpolation
    nonnan = np.where(~(np.isnan(termx[:,ind1])))[0]
    termx1 = termx[nonnan,ind1]
    termy1 = termy[nonnan,ind1]
    nonnan = np.where(~(np.isnan(termx[:,ind2])))[0]
    termx2 = termx[nonnan,ind2]
    termy2 = termy[nonnan,ind2]   
    
    if termy1[-1] > termy1[0]:
      termx1 = np.flipud(termx1)
      termy1 = np.flipud(termy1)
    if termy2[-1] > termy2[0]:
      termx2 = np.flipud(termx2)
      termy2 = np.flipud(termy2)
  
    # Get locations where the interpolated ice front intersects the glacier extent
    # First, get intersection pts for two closest ice-front positions in time
    term1 = LineString(np.column_stack([termx1,termy1]))
    intersect = extent.intersection(term1)
    try:
      if len(intersect) == 2:
        if intersect[0].y > intersect[1].y:
          top1 = [intersect[0].x,intersect[0].y]
          bot1 = [intersect[1].x,intersect[1].y]
        else:
          top1 = [intersect[1].x,intersect[1].y]
          bot1 = [intersect[0].x,intersect[0].y]
      else:
        print "Need to look at date ", datelib.fracyear_to_date(termt[ind1])
    except:
      print "Need to look at date ", datelib.fracyear_to_date(termt[ind1])
    
    term2 = LineString(np.column_stack([termx2,termy2]))
    intersect = extent.intersection(term2)
    try:
      if len(intersect) == 2:
        if intersect[0].y > intersect[1].y:
          top2 = [intersect[0].x,intersect[0].y]
          bot2 = [intersect[1].x,intersect[1].y]
        else:
          top2 = [intersect[1].x,intersect[1].y]
          bot2 = [intersect[0].x,intersect[0].y]
      else:
        print "Need to look at date ", datelib.fracyear_to_date(termt[ind2])
    except:
      print "Need to look at date ", datelib.fracyear_to_date(termt[ind2])
    # Now find new intersection points
    if top1[0] < top2[0]: # advancing on this side
      ind_top = np.where((xextent > top1[0]) & (xextent < top2[0]) & (abs(top1[1]-yextent) < 500.))[0]
      sortind = np.argsort(xextent[ind_top])
      xtops = np.r_[top1[0],xextent[ind_top[sortind]],top2[0]]
      ytops = np.r_[top1[1],yextent[ind_top[sortind]],top2[1]]
      dtops = distlib.transect(xtops,ytops)
      dtop = dtops[-1]*frac
    elif top1[0] > top2[0]: # retreating on this side
      ind_top = np.where((xextent < top1[0]) & (xextent > top2[0]) & (abs(top1[1]-yextent) < 500.))[0]
      sortind = np.argsort(xextent[ind_top])
      xtops = np.r_[top2[0],xextent[ind_top[sortind]],top1[0]]
      ytops = np.r_[top2[1],yextent[ind_top[sortind]],top1[1]]
      dtops = distlib.transect(xtops,ytops)
      dtop = dtops[-1]*(1-frac)
    else:
      print "not advancing or retreating on top"
    xtop = np.interp(dtop,dtops,xtops)
    ytop = np.interp(dtop,dtops,ytops)
      
    if bot1[0] < bot2[0]: # advancing on this side
      ind_bot = np.where((xextent > bot1[0]) & (xextent < bot2[0]) & (abs(bot1[1]-yextent) < 500.))[0]
      sortind = np.argsort(xextent[ind_bot])
      xbots = np.r_[bot1[0],xextent[ind_bot[sortind]],bot2[0]]
      ybots = np.r_[bot1[1],yextent[ind_bot[sortind]],bot2[1]]
      dbots = distlib.transect(xbots,ybots)
      dbot = (dbots[-1])*frac
    elif bot1[0] > bot2[0]: # retreating on this side
      ind_bot = np.where((xextent < bot1[0]) & (xextent > bot2[0]) & (abs(bot1[1]-yextent) < 500.))[0]
      sortind = np.argsort(xextent[ind_bot])
      xbots = np.r_[bot2[0],xextent[ind_bot[sortind]],bot1[0]]
      ybots = np.r_[bot2[1],yextent[ind_bot[sortind]],bot1[1]]
      dbots = distlib.transect(xbots,ybots)
      dbot = (dbots[-1])*(1-frac)
    else:
      print "not advancing or retreating on bot"

    xbot = np.interp(dbot,dbots,xbots)
    ybot = np.interp(dbot,dbots,ybots)

    # Now that we know the bottom and top points (extent of the ice front), we can start
    # calculating the shape, again based on linear interpolation
    # May need to change next expression to find indices between the sidewalls for Kanger, 
    # but this should work for Helheim
    ind_term1 = np.where((termy1 > bot1[1]) & (termy1 < top1[1]))[0]
    icefront_x = []
    icefront_y = []
    advance = []
    icefront_x.append(xtop)
    icefront_y.append(ytop)
    if i > 0:
      nonnan = np.where(~(np.isnan(xextents[:,i-1])))[0]
      extentpath = Path(np.column_stack([xextents[nonnan,i-1],yextents[nonnan,i-1]]))
      if extentpath.contains_point([xtop,ytop]) or (xtop < xtop_old):
        sign = -1
      else:
        sign = 1
      advance.append(sign*distlib.between_pts(xtop,ytop,xtop_old,ytop_old))
    else:
      advance.append(0)
    for j in ind_term1:
      # Get velocities to create a line, to interpolate between ice fronts
      uj = fu((termy1[j],termx1[j]))
      vj = fv((termy1[j],termx1[j]))
    
      # Create flowline that intersects that point of the ice front
      xunit = uj/(np.sqrt(uj**2+vj**2))
      yunit = vj/(np.sqrt(uj**2+vj**2))
      b = termy1[j] - (yunit/xunit)*termx1[j]
      flowlinex = np.arange(-3000.,3005.,10) + termx1[j]
      flowliney = (yunit/xunit)*flowlinex + b
      flowline = LineString(np.column_stack([flowlinex,flowliney]))
    
      # Find where flowline intersects the next ice-front position
      intersect = flowline.intersection(term2)
      add = False
      try:   
        if len(intersect) > 0: 
          ind = np.argmin(abs(np.array([intersect[k].x for k in range(0,len(intersect))])-termx1[j]))
          term2_flowline = [intersect[ind].x,intersect[ind].y]
          add = True
      except:
        try:
          term2_flowline = [intersect.x,intersect.y]
          add = True
        except:
          pass
      dflow = distlib.between_pts(termx1[j],termy1[j],term2_flowline[0],term2_flowline[1])
      dmid = frac*dflow
      xmid = np.interp(dmid,[0,dflow],[termx1[j],term2_flowline[0]])
      ymid = np.interp(dmid,[0,dflow],[termy1[j],term2_flowline[1]])
      if (add == True) and (ymid > ybot) and (ymid < ytop):  
        icefront_x.append(xmid)
        icefront_y.append(ymid)
        if i > 0:
          nonnan = np.where(~(np.isnan(timeseries_x[:,i-1])))[0]
          front_lasttime = LineString(np.column_stack([timeseries_x[nonnan,i-1],timeseries_y[nonnan,i-1]]))
          intersect = flowline.intersection(front_lasttime)
          try:
            diff = distlib.between_pts(xmid,ymid,intersect.x,intersect.y)
          except:
            try:
              diff = 1000.0
              for pt in intersect:
                newdiff = distlib.between_pts(xmid,ymid,pt.x,pt.y)
                if newdiff < diff:
                  diff = newdiff
              if diff == 1000.0:
                diff = 0
            except:
              diff = 0
          if extentpath.contains_point([xmid,ymid]):
              sign = -1
          else:
            sign = 1
          advance.append(sign*diff)
        else:
          advance.append(0)
    
    icefront_x.append(xbot)
    icefront_y.append(ybot)
    if i > 0:
      if extentpath.contains_point([xbot,ybot]) or (xbot < xbot_old):
        sign = -1
      else:
        sign = 1
      advance.append(sign*distlib.between_pts(xbot,ybot,xbot_old,ybot_old))
    else:
      advance.append(0)
    
    # Try sorting icefront to get rid of potential tangles
    icefront_x_old = np.asarray(icefront_x)
    icefront_y_old = np.asarray(icefront_y)
    advance_old = np.asarray(advance)
    icefront_x = np.zeros_like(icefront_x_old)
    icefront_y = np.zeros_like(icefront_y_old)
    advance = np.zeros_like(advance_old)
    icefront_x[0] = icefront_x_old[0]
    icefront_y[0] = icefront_y_old[0]
    advance[0] = advance_old[0]
    ind = range(1,len(icefront_x_old))
    for k in range(1,len(icefront_x_old)):
      mindist = 10000.
      for j in range(0,len(ind)):
        dist = distlib.between_pts(icefront_x[k-1],icefront_y[k-1],icefront_x_old[ind[j]],icefront_y_old[ind[j]])
        if dist < mindist:
          mindist = dist
          minind = ind[j]
      icefront_x[k] = icefront_x_old[minind]
      icefront_y[k] = icefront_y_old[minind]
      advance[k] = advance_old[minind]
      ind.remove(minind)
    
    # Save icefront in timeseries 
    timeseries_x[0:len(icefront_x),i] = icefront_x
    timeseries_y[0:len(icefront_y),i] = icefront_y
    timeseries_dist[0:len(icefront_x),i] = distlib.transect(icefront_x,icefront_y)
    timeseries_advance[0:len(icefront_x),i] = advance
    
    # Now create mesh extent and BC numbers using the interpolated ice front
    boundterminus = np.ones(len(icefront_x))*2.0
    ind1 = np.where((xextent > xbot) & (abs(yextent - ybot) < 1.0e3))[0][0] 
    ind2 = np.where((xextent > xtop) & (abs(yextent - ytop) < 1.0e3))[0][-1]

    extent_x = np.r_[xextent[0:ind1],np.flipud(icefront_x),xextent[ind2+1:]]
    extent_y = np.r_[yextent[0:ind1],np.flipud(icefront_y),yextent[ind2+1:]]
    bound = np.r_[bound_extent[0:ind1],boundterminus,bound_extent[ind2+1:]]
    
    xextents[0:len(extent_x),i] = extent_x
    yextents[0:len(extent_x),i] = extent_y
    bounds[0:len(extent_x),i] = bound
    
    xtop_old = float(xtop)
    ytop_old = float(ytop)
    xbot_old = float(xbot)
    ybot_old = float(ybot)
      
    
  return  time, xextents, yextents, bounds, timeseries_x, timeseries_y, timeseries_advance
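
A usage sketch (assumptions: glaclib is importable, DATA_HOME is set, and ice-front picks exist for the chosen interval); the Helheim dates and weekly timestep are hypothetical:

import glaclib

# Weekly ice-front/extent timeseries for Helheim over 2011-2012 (hypothetical interval)
time, xextents, yextents, bounds, term_x, term_y, term_advance = \
    glaclib.load_extent_timeseries('Helheim', 2011.0, 2012.0, 1/52.,
                                   datatypes=['Landsat', 'TSX', 'WV'])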
Code Example #4
                                                  method='linear',
                                                  fill_value=float('nan'))
    plt.plot(dists / 1e3, floatlib.height(zb), 'k:')
    plt.plot(dists / 1e3, f((y, x)), 'b', lw=1.5)
    plt.plot(dists / 1e3, floatlib.shelfbase(f((y, x))), 'b', lw=1.5)
    if glacier == 'Helheim':
        plt.ylim([-1200, 220])
        plt.xticks(range(-10, 5, 2), fontsize=10)
        plt.yticks(np.arange(-1200, 400, 200), fontsize=10)
        plt.xlim([-7, 4])
    elif glacier == 'Kanger':
        plt.ylim([-1200, 220])
        plt.xticks(np.arange(-10, 5, 2), fontsize=10)
        plt.yticks(np.arange(-1200, 400, 200), fontsize=10)
        plt.xlim([-7, 4])
    year, month, day = datelib.fracyear_to_date(timedem[i])
    plt.title(str(year) + '-' + str(month) + '-' + str(int(day)))

    plt.tight_layout()
    plt.subplots_adjust(hspace=10, wspace=0)

    plt.savefig(os.path.join(
        os.getenv("HOME"),
        "Bigtmp/model_movie/" + '{0:02g}'.format(i) + '.png'),
                format='PNG',
                dpi=400)
    plt.close()

##############
# Make movie #
##############
Code Example #5
                p = np.polyfit(model_time[ind[nonnan]],
                               model_grid_zs[j, ind[nonnan]].T, 1) / 365.25
                model_grid_dhdt[j, k] = p[0]

    vel_model_11day[k, :] = np.mean(vel_model[ind, :], axis=0)

dhdt = np.reshape(model_grid_dhdt, [len(ygrid), len(xgrid), len(model_time)])

print "Making plots..."
cx = cubehelix.cmap(start=1.0, rot=-1.1, reverse=True, minLight=0.05, sat=2)
colors = ['r', 'b', 'g', 'limegreen', 'gold', 'k']

for i in range(1, len(model_time)):
    fig = plt.figure(figsize=(5.5, 4.2))
    gs = matplotlib.gridspec.GridSpec(6, 2)
    year, month, day = datelib.fracyear_to_date(model_time[i])

    plt.subplot(gs[0:3, 0])
    ax = plt.gca()
    p = plt.imshow(model_grid['velocity'][:,:,i]/365.25,extent=[np.min(xgrid),np.max(xgrid),\
       np.min(ygrid),np.max(ygrid)],origin='lower',cmap=cx,clim=[0,22])
    plt.contour(model_grid['velocity'][:,:,i]/365.25,extent=[np.min(xgrid),np.max(xgrid),\
       np.min(ygrid),np.max(ygrid)],origin='lower',levels=np.arange(0,25,5),cmap=cx,linewidths=1)
    plt.plot(model_gl['x'][:, i], model_gl['y'][:, i], 'w.', markersize=0.5)
    plt.plot(np.r_[mesh_hole1[:, 0], mesh_hole1[0, 0]],
             np.r_[mesh_hole1[:, 1], mesh_hole1[0, 1]],
             'k',
             linewidth=0.75,
             zorder=2)
    plt.plot(np.r_[mesh_hole2[:, 0], mesh_hole2[0, 0]],
             np.r_[mesh_hole2[:, 1], mesh_hole2[0, 1]],
Code Example #6
# Find dimensions for plot
ncol = 0
for year in years:
  ndem = len(np.where(np.floor(timewv)==year)[0])
  if ndem > ncol:
    ncol = ndem
nrow = len(years)

# Make plot
plt.figure(figsize=(ncol*2.8,nrow*3))
cmap = matplotlib.cm.get_cmap('gray_r',4)
gs = matplotlib.gridspec.GridSpec(nrow,ncol)
gs.update(right=0.94,left=0.02,wspace=0.04,hspace=0.04)
for j in range(0,nrow):
  year = years[j]
  for i in range(0,len(np.where(np.floor(timewv)==year)[0])):
    plt.subplot(gs[j,i])
    wvind = np.where(np.floor(timewv)==year)[0][i]
    date = datelib.fracyear_to_date(timewv[wvind])
    shadeddem = demshadelib.set_shade(zwv[:,:,wvind],0,220)
    plt.imshow(shadeddem,extent=[np.min(xwv),np.max(xwv),np.min(ywv),np.max(ywv)],clim=[0,200],origin='lower')
    #plt.scatter(xf,yf,c=zabovefloat[:,wvind],s=3.0**2.0,cmap='gist_gray_r',edgecolors='none',vmin=-10,vmax=10)
    plt.xlim([np.min(xwv),np.max(xwv)])
    plt.ylim([np.min(ywv),np.max(ywv)])
    plt.xticks([])
    plt.yticks([])
    plt.text(xmin+500,ymax-1.25e3,str(date[0])+'-'+str(date[1])+'-'+str(int(np.floor(date[2]))),backgroundcolor='w',fontsize=8)
    
plt.savefig("/Users/kehrl/Bigtmp/"+glacier+"_dems.png",format='PNG',dpi=150)
plt.close()
Code Example #7
 imageind = np.argmin(abs(imagetimes - zstimedem[zsind[0]]))
 ax = plt.gca()
 plt.imshow(images[imageind][2],
            extent=[
                np.min(images[imageind][0]),
                np.max(images[imageind][0]),
                np.min(images[imageind][1]),
                np.max(images[imageind][1])
            ],
            origin='lower',
            cmap='Greys_r')
 plt.xticks([])
 plt.yticks([])
 plt.xlim([xmin, xmax])
 plt.ylim([ymin, ymax])
 year, month, day = datelib.fracyear_to_date(imagetimes[imageind])
 path = matplotlib.path.Path(
     [[0.02 * (xmax - xmin) + xmin, 0.82 * (ymax - ymin) + ymin],
      [0.02 * (xmax - xmin) + xmin, 0.98 * (ymax - ymin) + ymin],
      [0.38 * (xmax - xmin) + xmin, 0.98 * (ymax - ymin) + ymin],
      [0.38 * (xmax - xmin) + xmin, 0.82 * (ymax - ymin) + ymin],
      [0.02 * (xmax - xmin) + xmin, 0.82 * (ymax - ymin) + ymin]])
 patch = matplotlib.patches.PathPatch(path,
                                      edgecolor='k',
                                      facecolor='w',
                                      lw=1,
                                      zorder=3)
 ax.add_patch(patch)
 plt.text(xmin + 0.04 * (xmax - xmin),
          0.92 * (ymax - ymin) + ymin,
          '{0:4d}-{1:02d}-{2:02d}'.format(year, month, int(np.round(day))),
Code Example #8
File: vellib.py Project: tarynblack/big3
def inversion_3D(glacier,
                 x,
                 y,
                 time,
                 dir_velocity_out='none',
                 blur=False,
                 dx='none'):
    '''
  Inputs:
  glacier : glacier name (Kanger or Helheim)
  x : list of x coordinates for grid interpolation
  y : list of y coordinates for grid interpolation
  time : primary time for velocities, which will be filled in with other data
  dir_velocity_out : directory for outputting the velocity
  blur : if True, apply a gaussian blur to the mosaicked velocities
  dx : output grid spacing ('none' keeps the default mosaic resolution)

  Outputs:
  u : velocity in x-dir on grid defined by x,y
  v : velocity in y-dir on grid defined by x,y
  '''

    xmin = np.min(x) - 5.0e3
    xmax = np.max(x) + 5.0e3
    ymin = np.min(y) - 5.0e3
    ymax = np.max(y) + 5.0e3

    OUTDIR = os.path.join(os.getenv("DATA_HOME"),
                          "Velocity/MosaicVelocities/" + glacier)

    # Large velocity map to fill in gaps in smaller velocity map
    file_velocity_all = os.path.join(
        os.getenv("DATA_HOME"),
        "Velocity/TSX/" + glacier + "/TIF/all-2008-2016")

    # If the region is bigger than what is covered by the TSX stripmaps, then we use Ian's big
    # inSAR velocity map
    file_velocity_global = os.path.join(
        os.getenv("DATA_HOME"),
        "Velocity/Random/Greenland/AllGLVel/mosaicOffsets")

    year, month, day = datelib.fracyear_to_date(time)
    date = "%04d%02d%02d" % (year, month, day)

    # Individual velocity map for time step
    if time <= 2008:
        HOWATDIR = os.path.join(os.getenv("DATA_HOME"),
                                "Velocity/Howat/" + glacier + "/")
        # Use Howat velocity maps
        print date
        if glacier == 'Kanger':
            if '200707' in date:
                filename1 = HOWATDIR + "OPT_E68.80N_2007-08/OPT_E68.80N_2007-08"
                filename2 = HOWATDIR + "OPT_E68.80N_2007-07/OPT_E68.80N_2007-07"
            elif '200107' in date:
                filename1 = HOWATDIR + "OPT_E68.80N_2001-07/OPT_E68.80N_2001-07"
                filename2 = filename1
            elif ('200308' in date) or ('200307' in date):
                filename1 = HOWATDIR + "OPT_E68.80N_2003-07/OPT_E68.80N_2003-07"
                filename2 = HOWATDIR + "OPT_E68.80N_2001-07/OPT_E68.80N_2001-07"
            elif '200506' in date:
                filename1 = HOWATDIR + "OPT_E68.80N_2005-06/OPT_E68.80N_2005-06"
                filename2 = filename1
            elif '200508' in date:
                filename1 = HOWATDIR + "OPT_E68.80N_2005-08/OPT_E68.80N_2005-08"
                filename2 = filename1
            elif '200605' in date:
                filename2 = HOWATDIR + "OPT_E68.80N_2006-05/OPT_E68.80N_2006-05"
                filename1 = HOWATDIR + "OPT_E68.80N_2006-04/OPT_E68.80N_2006-04"
            elif '200607' in date:
                filename2 = HOWATDIR + "OPT_E68.80N_2006-07/OPT_E68.80N_2006-07"
                filename1 = HOWATDIR + "OPT_E68.80N_2006-06/OPT_E68.80N_2006-06"
            elif '200609' in date:
                filename1 = HOWATDIR + "OPT_E68.80N_2006-09/OPT_E68.80N_2006-09"
                filename2 = filename1
        elif glacier == 'Helheim':
            if '200709' in date:
                filename1 = HOWATDIR + "OPT_E66.50N_2007-08/OPT_E66.50N_2007-08"
                filename2 = HOWATDIR + "OPT_E66.50N_2007-09/OPT_E66.50N_2007-09"
            elif '200408' in date:
                filename1 = HOWATDIR + "OPT_E66.50N_2004-08/OPT_E66.50N_2004-08"
                filename2 = HOWATDIR + "OPT_E66.50N_2004-07/OPT_E66.50N_2004-07"
            elif '200508' in date:
                filename1 = HOWATDIR + "OPT_E66.50N_2005-09/OPT_E66.50N_2005-09"
                filename2 = HOWATDIR + "OPT_E66.50N_2005-07/OPT_E66.50N_2005-07"
            elif '200608' in date:
                filename1 = HOWATDIR + "OPT_E66.50N_2006-09/OPT_E66.50N_2006-09"
                filename2 = HOWATDIR + "OPT_E66.50N_2006-07/OPT_E66.50N_2006-07"

        files_vx = ' '+filename1+'.vx.tif '+filename2+'.vx.tif '+\
                file_velocity_all+'_vx.tif'+' '+file_velocity_global+'_vx.tif'
        files_vy = ' '+filename1+'.vy.tif '+filename2+'.vy.tif '+\
                file_velocity_all+'_vy.tif'+' '+file_velocity_global+'_vy.tif'
    else:
        # Use TSX velocity maps
        filename1, time1 = tsx_near_time(time, glacier, just_filename=True)
        filename2, time2 = tsx_near_time(time - 11 / 365.,
                                         glacier,
                                         just_filename=True)
        filename3, time3 = tsx_near_time(time + 11 / 365.,
                                         glacier,
                                         just_filename=True)

        if abs(time - time2) < abs(time - time3):
            files_vx = ' '+filename1+'_vx.tif'+' '+filename2+'_vx.tif'+\
         ' '+filename3+'_vx.tif'+' '+file_velocity_all+'_vx.tif'+' '+file_velocity_global+'_vx.tif'
            files_vy = ' '+filename1+'_vy.tif'+' '+filename2+'_vy.tif'+\
         ' '+filename3+'_vy.tif'+' '+file_velocity_all+'_vy.tif'+' '+file_velocity_global+'_vy.tif'
        else:
            files_vx = ' '+filename1+'_vx.tif'+' '+filename3+'_vx.tif'+\
              ' '+filename2+'_vx.tif'+' '+file_velocity_all+'_vx.tif'+' '+file_velocity_global+'_vx.tif'
            files_vy = ' '+filename1+'_vy.tif'+' '+filename3+'_vy.tif'+\
              ' '+filename2+'_vy.tif'+' '+file_velocity_all+'_vy.tif'+' '+file_velocity_global+'_vy.tif'

    CURRENTDIR = os.getcwd()
    os.chdir(OUTDIR)
    filename_vx = 'mosaic-' + date + '-vx'
    filename_vy = 'mosaic-' + date + '-vy'
    if dx == 'none':
        os.system('dem_mosaic --hole-fill-length 5 --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --priority-blending-length 10 -o'+filename_vx+files_vx)
        os.system('dem_mosaic --hole-fill-length 5 --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --priority-blending-length 10 -o'+filename_vy+files_vy)
    else:
        os.system('dem_mosaic --hole-fill-length 5 --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --tr '+str(dx)+' --priority-blending-length 10 -o'+filename_vx+files_vx)
        os.system('dem_mosaic --hole-fill-length 5 --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --tr '+str(dx)+' --priority-blending-length 10 -o'+filename_vy+files_vy)

    xu, yu, uu = geotifflib.read(filename_vx + "-tile-0.tif")
    xv, yv, vv = geotifflib.read(filename_vy + "-tile-0.tif")

    if (blur == True) and (dx == 'none'):
        print "Blurring DEM over 17 pixels (roughly 1.5km in each direction)..."
        # 17 pixel gaussian blur
        vx_blur = scipy.ndimage.filters.gaussian_filter(uu,
                                                        sigma=2,
                                                        truncate=4)
        vy_blur = scipy.ndimage.filters.gaussian_filter(vv,
                                                        sigma=2,
                                                        truncate=4)
    else:
        vx_blur = uu
        vy_blur = vv

    os.chdir(CURRENTDIR)

    # Calculate velocity magnitude
    vmag = np.sqrt(vx_blur**2 + vy_blur**2)

    ######################################################
    # Write out velocities to files for inversion solver #
    ######################################################

    # Interpolate to input grid
    xgrid, ygrid = np.meshgrid(x, y)
    fu = scipy.interpolate.RegularGridInterpolator((yu, xu),
                                                   vx_blur,
                                                   method='linear')
    vx = fu((ygrid, xgrid))
    fv = scipy.interpolate.RegularGridInterpolator((yv, xv),
                                                   vy_blur,
                                                   method='linear')
    vy = fv((ygrid, xgrid))

    if dir_velocity_out != 'none':
        #files = os.listdir(OUTDIR):
        #for file in files:
        #  if file.startswith('mosaic-'+date):
        #    shutil.copy(file,dir_velocity_out)

        # File for velocity in x-dir
        fidu = open(dir_velocity_out + "/udem.xy", "w")
        fidu.write('{}\n{}\n'.format(len(x), len(y)))

        # File for velocity in y-dir
        fidv = open(dir_velocity_out + "/vdem.xy", "w")
        fidv.write('{}\n{}\n'.format(len(x), len(y)))

        for i in range(0, len(x)):
            for j in range(0, len(y)):
                fidu.write('{} {} {}\n'.format(x[i], y[j], vx[j, i]))
                fidv.write('{} {} {}\n'.format(x[i], y[j], vy[j, i]))

        fidv.close()
        fidu.close()

    return vx, vy
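
A sketch of how inversion_3D might be called (not from the repository). It assumes vellib is importable, DATA_HOME is set, the external dem_mosaic tool is on the PATH, and that the grid and output directory below are placeholders:

import numpy as np
import vellib

# Hypothetical 100 m grid inside the Helheim domain
x = np.arange(280000., 315000., 100.)
y = np.arange(-2590000., -2560000., 100.)
vx, vy = vellib.inversion_3D('Helheim', x, y, 2013.0,
                             dir_velocity_out='/tmp/inversion', blur=True)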
Code Example #9
File: vellib.py Project: tarynblack/big3
def variability(glacier, time1, time2):

    '''
  x,y,velgrid_mask,veltrend,veldetrend,velrange,velcount,veltrend_error,time =
     variability(glacier,time1,time2)

  Load the TSX velocity record for "glacier", mask it, and compute the linear
  trend, detrended range, and observation count at each grid cell between
  fractional years time1 and time2.
  '''

    DIR_TSX = os.path.join(os.getenv("DATA_HOME"),
                           "Velocity/TSX/" + glacier + "/")

    if glacier == 'Helheim':
        xmin = 270000.0
        xmax = 354900.0
        ymin = -2601000.0
        ymax = -2541000.0
    elif glacier == 'Kanger':
        xmin = 457000.0
        xmax = 517000.0
        ymin = -2319100.0
        ymax = -2247100.0

    dx = dy = 100.
    nx = int(np.ceil((xmax - xmin) / dx) + 1)
    x = np.linspace(xmin, (nx - 1) * dx + xmin, nx)
    ny = int(np.ceil((ymax - ymin) / dx) + 1)
    y = np.linspace(ymin, (ny - 1) * dy + ymin, ny)
    xgrid, ygrid = np.meshgrid(x, y)
    coords = np.column_stack([ygrid.flatten(), xgrid.flatten()])

    #################
    # LOAD TSX Data #
    #################

    DIRs = os.listdir(DIR_TSX)

    # Get number of velocity files
    nt = 0
    for DIR in DIRs:
        if DIR.startswith('track'):
            nt = nt + 1

    # Set up variables
    velgrid = np.zeros([ny, nx, nt])
    mask = np.zeros([ny, nx, nt])
    velgrid_mask = np.zeros([ny, nx, nt])
    time = np.zeros(nt)
    ergrid = np.zeros([ny, nx, nt])

    # Load velocity and mask
    count = 0
    for j in range(0, len(DIRs)):
        DIR = DIRs[j]
        if DIR.startswith('track'):
            # Load velocity
            x1, y1, v1, vx1, vy1, ex1, ey1, time_file, interval1 = geodatlib.readvelocity(
                DIR_TSX, DIR, "mosaicOffsets")

            time[count] = time_file
            year, month, day = datelib.fracyear_to_date(time_file)

            xind1 = np.argmin(abs(x1 - xmin))
            xind2 = np.argmin(abs(x1 - xmax)) + 1
            yind1 = np.argmin(abs(y1 - ymin))
            yind2 = np.argmin(abs(y1 - ymax)) + 1

            # Load velocity
            try:
                # If the input and output grids have the same dimensions...
                velgrid[:, :, count] = v1[yind1:yind2, xind1:xind2]
            except:
                # Otherwise interpolate onto output grid
                f_dem = scipy.interpolate.RegularGridInterpolator(
                    [y1, x1],
                    v1,
                    bounds_error=False,
                    method='linear',
                    fill_value=float('nan'))
                v_flatten = f_dem(coords)

                # Reshape to grid
                velgrid[:, :, count] = np.reshape(v_flatten, (ny, nx))

            # Load mask
            date = "%04d%02d%02d" % (year, month, day)
            maskfile = DIR_TSX + 'TIF/' + DIR + '_' + date + '_' + 'mask.tif'
            if os.path.isfile(maskfile):
                xmask, ymask, mask[:, :, count] = geotifflib.read(maskfile)
            else:
                xmask, ymask, mask[:, :, count] = masklib.load_grid(
                    glacier, xmin, xmax, ymin, ymax, dx, icefront_time=time1)
                geotifflib.write_from_grid(xmask, ymask,
                                           np.flipud(mask[:, :, count]),
                                           float('nan'), maskfile)

            velgrid_mask[:, :, count] = np.array(velgrid[:, :, count])
            velgrid_mask[mask[:, :, count] == 1, count] = float('nan')

            count = count + 1

    del count, maskfile, date, xind1, yind1, xind2, yind2, year, month, x1, y1, vx1, vy1, ex1, ey1, time_file, interval1

    # Throw out obvious outliers
    ind = np.where(velgrid > 16.0e3)
    velgrid[ind[0], ind[1], ind[2]] = float('nan')
    velgrid_mask[ind[0], ind[1], ind[2]] = float('nan')
    print "Throwing out velocities above 16 km/yr to deal with outliers in Kanger record"

    # Only keep data that falls between time1 and time2, and sort that data by time
    sortind = np.argsort(time)
    time = time[sortind]
    velgrid_mask = velgrid_mask[:, :, sortind]
    velgrid = velgrid[:, :, sortind]

    ind = np.where((time > time1) & (time < time2))[0]
    velgrid_mask = velgrid_mask[:, :, ind]
    time = time[ind]
    velgrid = velgrid[:, :, ind]

    # Get average and std values
    velmean = np.nanmean(velgrid_mask, axis=2)

    # Get linear trends
    veltrend = np.zeros_like(velmean)
    veltrend_time1 = np.zeros_like(velmean)
    veltrend_time2 = np.zeros_like(velmean)
    veltrend_count = np.zeros_like(velmean)
    veltrend_p = np.zeros_like(velmean)
    veltrend_error = np.zeros_like(velmean)
    veltrend_r = np.zeros_like(velmean)
    veltrend_intercept = np.zeros_like(velmean)
    veltrend_p[:, :] = float('nan')
    veltrend[:, :] = float('nan')
    veltrend_error[:, :] = float('nan')
    veltrend_r[:, :] = float('nan')
    veltrend_intercept[:, :] = float('nan')
    for j in range(0, len(y)):
        for i in range(0, len(x)):
            nonnan = np.where((~(np.isnan(velgrid_mask[j, i, :]))))[0]
            if len(nonnan) > 0.75 * len(time):
                if (np.floor(np.min(time[nonnan])) == time1) and np.ceil(
                        np.max(time[nonnan])) == time2:
                    slope, intercept, r, p, std_err = stats.linregress(
                        time[nonnan], velgrid_mask[j, i, nonnan])
                    veltrend_count[j, i] = len(nonnan)
                    veltrend[j, i] = slope
                    veltrend_p[j, i] = p
                    veltrend_error[j, i] = std_err
                    veltrend_time1[j, i] = np.min(time[nonnan])
                    veltrend_time2[j, i] = np.max(time[nonnan])
                    veltrend_r[j, i] = r
                    veltrend_intercept[j, i] = intercept

    # Detrend velocity timeseries
    veldetrend = np.zeros_like(velgrid_mask)
    for i in range(0, len(time)):
        trend = veltrend_intercept + time[i] * veltrend
        veldetrend[:, :, i] = velgrid_mask[:, :, i] - trend

    # Calculate range of observed values
    velrange = np.zeros_like(velmean)
    velrange[:, :] = float('nan')
    for i in range(0, len(x)):
        for j in range(0, len(y)):
            nonnan = np.where(~(np.isnan(veldetrend[j, i, :])))[0]
            if len(nonnan) > 1:
                velrange[j, i] = np.max(veldetrend[j, i, nonnan]) - np.min(
                    veldetrend[j, i, nonnan])

    # Remove insignificant trends
    ind = np.where(veltrend_p > 0.05)
    veltrend[ind] = float('nan')
    veltrend_error[ind] = float('nan')

    # Get number of nonnan velocities for each pixel
    velcount = np.zeros([ny, nx])
    for j in range(0, ny):
        for i in range(0, nx):
            nonnan = len(np.where(~(np.isnan(velgrid_mask[j, i, :])))[0])
            velcount[j, i] = nonnan

    sortind = np.argsort(time)
    velgrid_mask = velgrid_mask[:, :, sortind]
    time = time[sortind]

    return x, y, velgrid_mask, veltrend, veldetrend, velrange, velcount, veltrend_error, time
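
A usage sketch, assuming vellib is importable and the TSX tracks for the glacier are available under DATA_HOME; the time window is hypothetical:

import vellib

x, y, velgrid_mask, veltrend, veldetrend, velrange, velcount, velerror, time = \
    vellib.variability('Kanger', 2011.0, 2016.0)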
Code Example #10
File: vellib.py Project: tarynblack/big3
def convert_binary_to_geotiff(glacier):
    '''
I'm getting tired of unpacking Ian's binary velocity files every time 
I need to use them, so I've set up a script to convert all of them to 
geotiffs. Then the "geodatlib" module checks to see if there are geotiffs 
before unpacking the binary files.
  '''

    DIRTOP_TSX = os.path.join(os.getenv("DATA_HOME"),
                              "Velocity/TSX/" + glacier + "/")
    DIRTOP_RADARSAT = os.path.join(os.getenv("DATA_HOME"),
                                   "Velocity/RADARSAT/Greenland/")

    # TSX files
    files = os.listdir(DIRTOP_TSX)
    for file in files:
        if file.startswith('track'):
            print file
            # Load binary data
            x, y, v, vx, vy, vz, ex, ey, time, interval = geodatlib.readbinary(
                DIRTOP_TSX + file + "/mosaicOffsets",
                nodatavalue=-2.0e9,
                read_vz=True)

            year, month, day = datelib.fracyear_to_date(time)

            # Set up date label for geotiff file
            date = "%04d%02d%02d" % (year, month, day)

            # Save as geotiff
            geotifflib.write_from_grid(
                x, y, np.flipud(v), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_v.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(vx), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_vx.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(vy), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_vy.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(ex), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_ex.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(ey), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_ey.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(vz), -2.0e9,
                DIRTOP_TSX + "TIF/" + file + "_" + date + "_vz.tif")

    # RADARSAT files
    files = os.listdir(DIRTOP_RADARSAT)
    for file in files:
        if file.startswith('winter'):
            print file
            # Load binary data
            x, y, v, vx, vy, ex, ey, time, interval = geodatlib.readbinary(
                DIRTOP_RADARSAT + file + "/mosaicOffsets")

            # Save as geotiff

            geotifflib.write_from_grid(
                x, y, np.flipud(v), -2.0e9,
                DIRTOP_RADARSAT + "TIF/" + file + "_v.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(vx), -2.0e9,
                DIRTOP_RADARSAT + "TIF/" + file + "_vx.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(vy), -2.0e9,
                DIRTOP_RADARSAT + "TIF/" + file + "_vy.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(ex), -2.0e9,
                DIRTOP_RADARSAT + "TIF/" + file + "_ex.tif")
            geotifflib.write_from_grid(
                x, y, np.flipud(ey), -2.0e9,
                DIRTOP_RADARSAT + "TIF/" + file + "_ey.tif")

    return 1
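
A sketch of the one-off conversion call, assuming vellib is importable and the TSX/RADARSAT directory layout used above exists under DATA_HOME:

import vellib

# Convert every binary TSX track and RADARSAT winter mosaic for Helheim to geotiffs
vellib.convert_binary_to_geotiff('Helheim')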
Code Example #11
File: vellib.py Project: tarynblack/big3
def inversion_2D(x, y, d, glacier, time, dir_velocity_out, filt_len='none'):

    xmin = np.min(x) - 2.0e3
    xmax = np.max(x) + 2.0e3
    ymin = np.min(y) - 2.0e3
    ymax = np.max(y) + 2.0e3

    OUTDIR = os.path.join(os.getenv("DATA_HOME"),
                          "Velocity/MosaicVelocities/" + glacier)

    # Large velocity map to fill in gaps in smaller velocity map
    file_velocity_global = os.path.join(
        os.getenv("DATA_HOME"),
        "Velocity/Random/Greenland/AllGLVel/mosaicOffsets")

    # Individual velocity map
    filename1, time1 = tsx_near_time(time, glacier, just_filename=True)
    filename2, time2 = tsx_near_time(time - 0.1, glacier, just_filename=True)
    filename3, time3 = tsx_near_time(time + 0.1, glacier, just_filename=True)
    year, month, day = datelib.fracyear_to_date(time1)
    date = "%04d%02d%02d" % (year, month, day)

    files_vx = ' '+filename1+'_vx.tif'+' '+filename2+'_vx.tif'+\
      ' '+filename3+'_vx.tif'+' '+file_velocity_global+'_vx.tif'
    files_vy = ' '+filename1+'_vy.tif'+' '+filename2+'_vy.tif'+\
      ' '+filename3+'_vy.tif'+' '+file_velocity_global+'_vy.tif'

    CURRENTDIR = os.getcwd()
    os.chdir(OUTDIR)
    filename_vx = 'mosaic-' + date + '-vx'
    if not (os.path.isfile(filename_vx + '-tile-0.tif')):
        os.system('dem_mosaic --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --priority-blending-length 10 -o'+filename_vx+files_vx)
    filename_vy = 'mosaic-' + date + '-vy'
    if not (os.path.isfile(filename_vy + '-tile-0.tif')):
        os.system('dem_mosaic --t_projwin '+str(xmin)+' '+str(ymin)+' '+str(xmax)+\
        ' '+str(ymax)+' --priority-blending-length 10 -o'+filename_vy+files_vy)

    xu, yu, uu = geotifflib.read(filename_vx + "-tile-0.tif")
    xv, yv, vv = geotifflib.read(filename_vy + "-tile-0.tif")

    fu = scipy.interpolate.RegularGridInterpolator((yu, xu), uu)
    vx = fu((y, x))
    fv = scipy.interpolate.RegularGridInterpolator((yv, xv), vv)
    vy = fv((y, x))

    vnonnan = np.sqrt(vx**2 + vy**2)

    # Filter velocities
    if filt_len != 'none':
        cutoff = (1 / filt_len) / (1 / (np.diff(d[1:3]) * 2))
        b, a = scipy.signal.butter(4, cutoff, btype='low')
        filtered = scipy.signal.filtfilt(b, a, vnonnan)
    else:
        filtered = np.array(vnonnan)

    # Write out the velocity data
    fid = open(dir_velocity_out + "velocity.dat", 'w')
    R = len(filtered)
    fid.write('{0}\n'.format(R))
    for j in range(0, R):
        fid.write('{} {}\n'.format(d[j], filtered[j]))
    fid.close()

    return filtered
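
A usage sketch with a synthetic flowline (the coordinates, time, output directory, and filter length are hypothetical); it assumes vellib is importable and dem_mosaic is available:

import numpy as np
import vellib

# Synthetic along-flow distance and coordinates, for illustration only
d = np.arange(0., 20000., 100.)
xflow = 290000. + d
yflow = np.full_like(d, -2575000.)
vel = vellib.inversion_2D(xflow, yflow, d, 'Helheim', 2013.0,
                          '/tmp/inversion/', filt_len=1000.0)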
Code Example #12
File: vellib.py Project: tarynblack/big3
def velocity_grid(glacier,
                  xmin=-np.Inf,
                  xmax=np.Inf,
                  ymin=-np.Inf,
                  ymax=np.Inf,
                  resolution=100):

    DIR_TSX = os.path.join(os.getenv("DATA_HOME"),
                           "Velocity/TSX/" + glacier + "/")

    dx = dy = float(resolution)
    nx = int(np.ceil((xmax - xmin) / dx) + 1)
    x = np.linspace(xmin, (nx - 1) * dx + xmin, nx)
    ny = int(np.ceil((ymax - ymin) / dx) + 1)
    y = np.linspace(ymin, (ny - 1) * dy + ymin, ny)
    xgrid, ygrid = np.meshgrid(x, y)
    coords = np.column_stack([ygrid.flatten(), xgrid.flatten()])

    #################
    # LOAD TSX Data #
    #################

    DIRs = os.listdir(DIR_TSX)

    # Get number of velocity files
    nt = 0
    for DIR in DIRs:
        if DIR.startswith('track'):
            nt = nt + 1

    # Set up variables
    velgrid = np.zeros([ny, nx, nt])
    time = np.zeros(nt)
    ergrid = np.zeros([ny, nx, nt])

    # Load velocity and mask
    count = 0
    for j in range(0, len(DIRs)):
        DIR = DIRs[j]
        if DIR.startswith('track'):
            # Load velocity
            x1, y1, v1, vx1, vy1, ex1, ey1, time_file, interval1 = geodatlib.readvelocity(
                DIR_TSX, DIR, "mosaicOffsets")

            time[count] = time_file
            year, month, day = datelib.fracyear_to_date(time_file)

            xind1 = np.argmin(abs(x1 - xmin))
            xind2 = np.argmin(abs(x1 - xmax)) + 1
            yind1 = np.argmin(abs(y1 - ymin))
            yind2 = np.argmin(abs(y1 - ymax)) + 1

            # Load velocity
            try:
                # If the input and output grids have the same dimensions...
                velgrid[:, :, count] = v1[yind1:yind2, xind1:xind2]
            except:
                # Otherwise interpolate onto output grid
                f_dem = scipy.interpolate.RegularGridInterpolator(
                    [y1, x1],
                    v1,
                    bounds_error=False,
                    method='linear',
                    fill_value=float('nan'))
                v_flatten = f_dem(coords)

                # Reshape to grid
                velgrid[:, :, count] = np.reshape(v_flatten, (ny, nx))

            count = count + 1

    # Sort velocities
    sortind = np.argsort(time)
    time = time[sortind]
    velgrid = velgrid[:, :, sortind]

    return x, y, velgrid, time
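
A usage sketch, assuming vellib is importable and DATA_HOME points at the velocity archive; the window below reuses the Helheim bounds from the variability example:

import vellib

x, y, velgrid, time = vellib.velocity_grid('Helheim',
                                           xmin=270000., xmax=354900.,
                                           ymin=-2601000., ymax=-2541000.,
                                           resolution=100)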
Code Example #13
        # Create elmer mesh
        call([
            "ElmerGrid", "14", "2", file_2d_temp + ".msh", "-autoclean",
            "-metis", partitions, "1"
        ])

        # Check to make sure the mesh was created successfully
        files = os.listdir(DIRM + file_2d_temp + '/partitioning.' +
                           str(partitions))
        success = False
        for file in files:
            if file.endswith('header'):
                success = True
        if success == False:
            year, month, day = datelib.fracyear_to_date(time1 + i * dt)
            sys.exit('Unsuccessful generation of mesh for date ' + str(year) +
                     str(month) + str(int(np.round(day))) + ', timestep ' +
                     str(i))

        os.system("rm mesh2d.geo")

        os.system("tar -czf" + file_2d_temp + ".tar.gz" + " " + file_2d_temp)
        os.system("rm -r " + file_2d_temp)
    os.system("tar -czf mesh_gmsh.tar.gz *.msh")
    os.system("rm *.msh")

    os.chdir(CURDIR)
    del CURDIR, file_2d_temp, exterior_temp, xnew, ynew, zbed_new, zsur_new, zbot_new

##########################################