def main():
    parser = getparser()
    args = parser.parse_args()

    if args.stack_fn is not None:
        if os.path.exists(args.stack_fn):
            sys.exit("Found existing stack_fn: %s" % args.stack_fn)

    #Note: res and extent are passed directly to warplib.memwarp_multi_fn, so can be many types
    s = malib.DEMStack(fn_list=args.src_fn_list, stack_fn=args.stack_fn, outdir=args.outdir, \
            res=args.tr, extent=args.te, srs=args.t_srs, \
            trend=args.trend, robust=args.robust, n_thresh=args.min_n, min_dt_ptp=args.min_dt_ptp, n_cpu=args.n_cpu, \
            med=args.med, stats=args.stats, save=args.save, sort=args.sort, datestack=args.datestack)
    print(s.stack_fn)
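#Example invocation (a sketch: the getparser() flag definitions are not shown in
#this excerpt, so the flag names below are inferred from the args attributes above):
#  make_stack.py -tr min -te union -t_srs first --med --trend dem_1.tif dem_2.tif dem_3.tif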
    pltlib.add_scalebar(axa[0], res=res)
    plt.tight_layout()

outdir = 'stack_anomaly'
if not os.path.exists(outdir):
    os.makedirs(outdir)

#dem_fn_list = glob.glob('*8m_trans_warp.tif')
#stack = malib.DEMStack(dem_fn_list, med=True)
#dem_ref_fn = 'rainier_allgood_mos-tile-0_warp.tif'
#dem_ref = iolib.fn_getma(dem_ref_fn)

stack_fn = sys.argv[1]
#Use the stack median as the reference surface
#stack = malib.DEMStack(stack_fn=stack_fn, med=True)
#dem_ref = stack.stack_med
#Use the stack mean as the reference surface
stack = malib.DEMStack(stack_fn=stack_fn)
dem_ref = stack.stack_mean
dem_ds = stack.get_ds()
dem_clim = malib.calcperc(dem_ref, (2,98))
dem_fn_list = stack.fn_list

#Compute anomalies relative to the reference surface, with symmetric color limits
anomaly_stack = stack.ma_stack - dem_ref
anomaly_clim = np.max(np.abs(malib.calcperc(anomaly_stack, (1,99))))
anomaly_clim = (-anomaly_clim, anomaly_clim)

#for dem_fn in [dem_ref_fn]+dem_fn_list:
for n, dem_fn in enumerate(dem_fn_list):
    print('%i of %i: %s' % (n+1, len(dem_fn_list), dem_fn))
    #dem_ds = iolib.fn_getds(dem_fn)
    #dem = iolib.ds_getma(dem_ds)
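    #The remainder of the per-DEM loop body is truncated in this excerpt.
    #A minimal sketch of an anomaly map figure consistent with the variables
    #prepared above (figure layout and output filename are assumptions):
    anomaly = anomaly_stack[n]
    fig, ax = plt.subplots()
    ax.imshow(dem_ref, cmap='gray', clim=dem_clim)
    im = ax.imshow(anomaly, cmap='RdBu', clim=anomaly_clim, alpha=0.6)
    pltlib.hide_ticks(ax)
    pltlib.add_cbar(ax, im, 'Elevation anomaly (m)')
    out_fn = os.path.join(outdir, os.path.splitext(os.path.split(dem_fn)[-1])[0] + '_anomaly.png')
    fig.savefig(out_fn, dpi=150)
    plt.close(fig)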
for n, feat in enumerate(glac_shp_lyr):
    #glac_geom_orig = geolib.geom_dup(feat.GetGeometryRef())
    feat_fn = rgi_name(feat)
    print(n, feat_fn)
    glac_geom = geolib.geom_dup(feat.GetGeometryRef())
    glac_geom_extent = geolib.geom_extent(glac_geom)
    #Spatial filter
    aster_index_lyr.SetSpatialFilter(glac_geom)
    aster_count = aster_index_lyr.GetFeatureCount()
    print("ASTER count after spatial filter: %i" % aster_count)
    if aster_count > min_aster_count:
        fn_list = []
        for aster_feat in aster_index_lyr:
            #Only 1 field from gdaltindex, 'location'
            fn = os.path.join(asterdir, aster_feat.GetField(0))
            fn_list.append(fn)
        stack = malib.DEMStack(fn_list, outdir=os.path.join(stackdir, feat_fn), \
                res='max', extent=glac_geom_extent, srs=aster_index_srs, \
                mask_geom=glac_geom, n_thresh=min_aster_count, min_dt_ptp=min_dt_ptp)
        #if stack.ma_stack is not None:
        #sys.exit()
        #glac_geom_mask = geolib.geom2mask(glac_geom, stack.get_ds())
        #ds_list = warplib.memwarp_multi_fn(fn_list, res='max', extent=glac_geom_extent)
    aster_index_lyr.ResetReading()

#Generate plots
#hs.sh */*mean.tif
#for i in */*trend.tif; do imviewer.py -clim -10 10 -cmap RdYlBu -label 'Trend (m/yr)' -of png -overlay $(echo $i | sed 's/_trend/_mean_hs_az315/') $i -scalebar -outsize 8 8 -dpi 100; done
#for i in */*count.tif; do imviewer.py -clim 0 20 -cmap inferno -label 'Count' -of png -overlay $(echo $i | sed 's/_count/_mean_hs_az315/') $i -scalebar -outsize 8 8 -dpi 100; done
#for i in */*[0-9]_std.tif; do imviewer.py -clim 0 30 -label 'Std (m)' -of png -overlay $(echo $i | sed 's/_std/_mean_hs_az315/') $i -scalebar -outsize 8 8 -dpi 100; done
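#Setup sketch for the layers and thresholds the loop above assumes (filenames and
#the min_aster_count/min_dt_ptp values are assumptions, not from this excerpt):
from osgeo import ogr
glac_shp_ds = ogr.Open('rgi_glac_outlines.shp')
glac_shp_lyr = glac_shp_ds.GetLayer()
#Tile index built with: gdaltindex aster_index.shp aster/*.tif
aster_index_ds = ogr.Open('aster_index.shp')
aster_index_lyr = aster_index_ds.GetLayer()
aster_index_srs = aster_index_lyr.GetSpatialRef()
min_aster_count = 5
min_dt_ptp = 365.25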
def main():
    if len(sys.argv) != 2:
        sys.exit("Usage: %s stack.npz" % os.path.basename(sys.argv[0]))

    stack_fn = sys.argv[1]

    print("Loading stack")
    s = malib.DEMStack(stack_fn=stack_fn, stats=True, trend=True, save=False)
    global d
    d = s.date_list_o
    d_ptp = d[-1] - d[0]
    d_pad = 0.03*d_ptp
    global min_dt
    min_dt = d[0]-d_pad
    global max_dt
    max_dt = d[-1]+d_pad
    #Use these to set bounds to hardcode min/max of all stacks
    #import pytz
    #min_dt = datetime(1999,1,1)
    #min_dt = datetime(2007,1,1, tzinfo=pytz.utc)
    #max_dt = datetime(2015,12,31, tzinfo=pytz.utc)

    global source
    source = np.ma.array(s.source)
    global source_dict
    source_dict = get_source_dict()
    global error
    error = s.error
    global gt
    gt = s.gt
    global m
    m = s.ma_stack

    val = s.stack_mean
    count = s.stack_count
    std = s.stack_std
    trend = s.stack_trend
    detrended_std = s.stack_detrended_std

    stack_type = 'dem'
    global filter_outliers
    filter_outliers = False

    global pad
    global geoid_offset
    global plot_trend
    global plot_resid
    global errorbars

    if 'TSX' in source or 'ALOS' in source or 'RS1' in source or 'RS2' in source:
        stack_type = 'velocity'
    if 'zs' in stack_fn:
        stack_type = 'racmo'
    if 'meltrate' in stack_fn:
        stack_type = 'meltrate'

    if stack_type == 'velocity':
        #pad = 3
        #Use this for Jak stack with RADARSAT data
        pad = 0
        ylabel = 'Velocity (m/yr)'
        ylabel_rel = 'Relative Velocity (m/yr)'
        ylabel_resid = 'Detrended Velocity (m/yr)'
        plot4_label = 'Detrended std (m/yr)'
        hs = None
        alpha = 1.0
        geoid_offset = False
        plot_trend = False
        plot_resid = False
        errorbars = False
        if 'RS' in source:
            filter_outliers = True
    elif stack_type == 'racmo':
        pad = 0
        ylabel = 'RACMOFDM zs (m)'
        ylabel_rel = 'Relative RACMOFDM zs (m)'
        ylabel_resid = 'Detrended RACMOFDM zs (m)'
        plot4_label = 'Detrended std (m)'
        hs = None
        alpha = 1.0
        geoid_offset = False
        plot_trend = True
        plot_resid = True
        errorbars = False
    elif stack_type == 'meltrate':
        pad = 3
        ylabel = 'Melt Rate (m/yr)'
        ylabel_rel = 'Relative Melt Rate (m/yr)'
        ylabel_resid = 'Detrended Melt Rate (m/yr)'
        plot4_label = 'Detrended std (m/yr)'
        hs = None
        alpha = 1.0
        geoid_offset = False
        plot_trend = True
        plot_resid = False
        errorbars = False
    else:
        #pad = 5
        #pad = 1
        pad = 3
        ylabel = 'Elevation (m EGM2008)'
        ylabel_rel = 'Relative Elevation (m)'
        ylabel_resid = 'Detrended Elevation (m)'
        #plot4_label = 'Detrended std (m)'
        plot4_label = 'Elevation std (m)'
        s.mean_hillshade()
        hs = s.stack_mean_hs
        hs_clim = malib.calcperc(hs, (2,98))
        alpha = 0.6
        geoid_offset = False
        plot_trend = True
        plot_resid = True
        errorbars = True

    #Set color cycle
    reset_colors()

    global ms
    ms = 5

    #fig = plt.figure(0, figsize=(14,12), facecolor='white')
    fig = plt.figure(0, figsize=(14,12))

    #These record all points plotted on the context plots
    global ax_pt_list
    ax_pt_list = [[], [], [], []]

    interp = 'none'
    #interp = 'bicubic'

    #Overlay on mean_hs
    #Add colorbars
    imshow_kwargs = {'interpolation':interp}

    val_clim = malib.calcperc(val, (2,98))
    ax0 = fig.add_subplot(221)
    if hs is not None:
        ax0.imshow(hs, cmap='gray', clim=hs_clim, **imshow_kwargs)
    im0 = ax0.imshow(val, cmap=cpt_rainbow, clim=val_clim, alpha=alpha, **imshow_kwargs)
    #This was used for Stanton et al figure
    #val_clim = (0, 50)
    #im0 = ax0.imshow(val, cmap=cmaps.inferno, clim=val_clim, alpha=alpha, **imshow_kwargs)
    ax0.set_adjustable('box-forced')
    pltlib.hide_ticks(ax0)
    pltlib.add_cbar(ax0, im0, ylabel)

    count_clim = malib.calcperc(count, (2,98))
    #count_clim = malib.calcperc(count, (4,100))
    ax1 = fig.add_subplot(222, sharex=ax0, sharey=ax0)
    if hs is not None:
        ax1.imshow(hs, cmap='gray', clim=hs_clim, **imshow_kwargs)
    im1 = ax1.imshow(count, cmap=cmaps.inferno, clim=count_clim, alpha=alpha, **imshow_kwargs)
    ax1.set_adjustable('box-forced')
    pltlib.hide_ticks(ax1)
    pltlib.add_cbar(ax1, im1, 'Count')

    #clim=(-20, 20)
    #trend_clim = malib.calcperc(trend, (1,99))
    #trend_clim = malib.calcperc(trend, (2,98))
    trend_clim = malib.calcperc(trend, (4,96))
    #trend_clim = malib.calcperc(trend, (10,90))
    max_abs_clim = max(np.abs(trend_clim))
    trend_clim = (-max_abs_clim, max_abs_clim)
    ax2 = fig.add_subplot(223, sharex=ax0, sharey=ax0)
    #ax0.set_title("Trend")
    if hs is not None:
        ax2.imshow(hs, cmap='gray', clim=hs_clim, **imshow_kwargs)
    im2 = ax2.imshow(trend, cmap='RdBu', clim=trend_clim, alpha=alpha, **imshow_kwargs)
    ax2.set_adjustable('box-forced')
    pltlib.hide_ticks(ax2)
    pltlib.add_cbar(ax2, im2, 'Linear Trend (m/yr)')

    dstd_clim = (0, malib.calcperc(std, (0,95))[1])
    #dstd_clim = (0, malib.calcperc(detrended_std, (0,98))[1])
    ax3 = fig.add_subplot(224, sharex=ax0, sharey=ax0)
    if hs is not None:
        ax3.imshow(hs, cmap='gray', clim=hs_clim, **imshow_kwargs)
    im3 = ax3.imshow(detrended_std, cmap=cpt_rainbow, clim=dstd_clim, alpha=alpha, **imshow_kwargs)
    #im3 = ax3.imshow(std, cmap=cpt_rainbow, clim=dstd_clim, alpha=alpha, **imshow_kwargs)
    ax3.set_adjustable('box-forced')
    pltlib.hide_ticks(ax3)
    #pltlib.add_cbar(ax3, im3, 'Detrended Std (m)')
    pltlib.add_cbar(ax3, im3, plot4_label)

    global ax_list
    ax_list = [ax0, ax1, ax2, ax3]

    plt.autoscale(tight=True)
    plt.tight_layout()

    cid = fig.canvas.mpl_connect('button_press_event', onclick)

    fig1 = plt.figure(1)
    global ax_rel
    ax_rel = fig1.add_subplot(111)
    fmt_ax(ax_rel, ylabel=ylabel_rel, legend_source=source)

    fig2 = plt.figure(2)
    global ax_abs
    ax_abs = fig2.add_subplot(111)
    fmt_ax(ax_abs, ylabel=ylabel, legend_source=source)

    fig3 = plt.figure(3)
    global ax_resid
    ax_resid = fig3.add_subplot(111)
    fmt_ax(ax_resid, ylabel=ylabel_resid, legend_source=source)
    plt.axhline(0, color='k', linestyle='-', linewidth=0.6)

    """
    #print "Saving figure"
    #fig_fn = os.path.splitext(s.stack_fn)[0] + '_context_maps.pdf'
    fig_fn = os.path.splitext(s.stack_fn)[0] + '_context_maps.png'
    plt.figure(0)
    plt.tight_layout()
    plt.savefig(fig_fn, dpi=300)

    fig_fn = os.path.splitext(s.stack_fn)[0] + '.png'
    plt.figure(2)
    #plt.ylim(70, 350)
    plt.tight_layout()
    plt.savefig(fig_fn, dpi=300)
    """

    plt.show()
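#onclick() (connected above via mpl_connect) is not shown in this excerpt.
#A minimal sketch of the idea: map a click on any context panel to array indices,
#then plot that pixel's time series on the absolute-value axis. Names mirror the
#globals above; the real handler presumably does more (error bars, trend fits, etc.):
def onclick_sketch(event):
    if event.inaxes not in ax_list:
        return
    #imshow was called without an extent, so data coords are array coords
    col = int(round(event.xdata))
    row = int(round(event.ydata))
    #Extract the time series for this pixel from the masked stack
    ts = m[:, row, col]
    if ts.count() == 0:
        return
    ax_abs.plot(d, ts, marker='o', markersize=ms, linestyle='None')
    ax_abs.figure.canvas.draw()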
def main():
    if len(sys.argv) < 2:
        sys.exit("Usage: %s stack.npz [mask.tif]" % os.path.basename(sys.argv[0]))

    #This will attempt to load cached files on disk
    load_existing = False
    #Limit spatial interpolation to input mask
    clip_to_mask = True

    #This expects a DEMStack object, see pygeotools/lib/malib.py or pygeotools/make_stack.py
    stack_fn = sys.argv[1]
    #Optional shp polygon as valid mask, in same projection as input raster
    mask_fn = None
    if len(sys.argv) > 2:
        mask_fn = sys.argv[2]

    stack = malib.DEMStack(stack_fn=stack_fn, save=False, trend=True, med=True, stats=True)
    #Get times of original obs
    t = stack.date_list_o.data
    #Round to integer days, then pad the endpoints slightly so the first/last obs
    #fall inside the interpolation hull (keep float dtype for the in-place pad)
    t = t.astype(int).astype(float)
    t[0] -= 0.1
    t[-1] += 0.1

    if clip_to_mask and mask_fn is not None:
        m = geolib.shp2array(mask_fn, res=stack.res, extent=stack.extent)
        #Expand mask - hardcoded to 6 km
        import scipy.ndimage
        it = int(np.ceil(6000. / stack.res))
        m = ~(scipy.ndimage.morphology.binary_dilation(~m, iterations=it))
        apply_mask(stack.ma_stack, m)

    #This is used from here on out
    test = stack.ma_stack
    test_ptp = stack.dt_stack_ptp
    test_source = np.array(stack.source)
    res = stack.res
    gt = np.copy(stack.gt)

    #Probably don't need full-res stack
    if True:
        stride = 2
        test = test[:, ::stride, ::stride]
        test_ptp = test_ptp[::stride, ::stride]
        res *= stride
        print("Using a stride of %i (%0.1f m)" % (stride, res))
        gt[[1, 5]] *= stride

    print("Orig shape: ", test.shape)
    #Check to make sure all t have valid data
    tcount = test.reshape(test.shape[0], test.shape[1] * test.shape[2]).count(axis=1)
    validt_idx = (tcount > 0).nonzero()[0]
    test = test[validt_idx]
    test_source = test_source[validt_idx]
    t = t[validt_idx]
    print("New shape: ", test.shape)

    y, x = (test.count(axis=0) > 1).nonzero()
    x = x.astype(int)
    y = y.astype(int)
    #vm_t = test.reshape(test.shape[0], test.shape[1]*test.shape[2])
    vm_t = test[:, y, x]
    vm_t_flat = vm_t.ravel()
    idx = ~np.ma.getmaskarray(vm_t_flat)
    #These are values
    VM = vm_t_flat[idx]

    #Determine scaling factors for x and y coords
    #Should be the same for both
    xy_scale = max(x.ptp(), y.ptp())
    xy_offset = min(x.min(), y.min())

    #This scales t to encourage interpolation along the time axis rather than spatial axis
    t_factor = 16.
    t_scale = t.ptp() * t_factor
    t_offset = t.min()

    xn = rangenorm(x, xy_offset, xy_scale)
    yn = rangenorm(y, xy_offset, xy_scale)
    tn = rangenorm(t, t_offset, t_scale)

    X = np.tile(xn, t.size)[idx]
    Y = np.tile(yn, t.size)[idx]
    T = np.repeat(tn, x.size)[idx]
    #These are coords
    pts = np.vstack((X, Y, T)).T

    #Step size in days
    #ti_dt = 91.3125
    #ti_dt = 121.75
    ti_dt = 365.25

    #Set min and max times for interpolation
    #ti = np.arange(t.min(), t.max(), ti_dt)
    ti_min = timelib.dt2o(datetime(2008, 1, 1))
    ti_max = timelib.dt2o(datetime(2015, 1, 1))

    #Interpolate at these times
    ti = np.arange(ti_min, ti_max, ti_dt)
    #Annual
    #ti = timelib.dt2o([datetime(2008,1,1), datetime(2009,1,1), datetime(2010,1,1), datetime(2011,1,1), \
    #        datetime(2012,1,1), datetime(2013,1,1), datetime(2014,1,1), datetime(2015,1,1)])
    tin = rangenorm(ti, t_offset, t_scale)

    """
    #Never got this working efficiently, but preserved for reference
    #Radial basis function interpolation
    #Need to normalize to input cube
    print("Running Rbf interpolation for %i points" % X.size)
    rbfi = scipy.interpolate.Rbf(Xn,Yn,Tn,VM, function='linear', smooth=0.1)
    #rbfi = scipy.interpolate.Rbf(Xn,Yn,Tn,VM, function='gaussian', smooth=0.000001)
    #rbfi = scipy.interpolate.Rbf(Xn,Yn,Tn,VM, function='inverse', smooth=0.00001)
    print("Sampling result at %i points" % xin.size)
    vmi_rbf = rbfi(xin, yin, tin.repeat(x.size))
    vmi_rbf_ma[:,y,x] = np.ma.fix_invalid(vmi_rbf.reshape((ti.size, x.shape[0])))
    """

    #Attempt to load cached interpolation function
    int_fn = '%s_LinearNDint_%i_%i.pck' % (os.path.splitext(stack_fn)[0], test.shape[1], test.shape[2])
    print(int_fn)
    if load_existing and os.path.exists(int_fn):
        print("Loading pickled interpolation function: %s" % int_fn)
        f = open(int_fn, 'rb')
        linNDint = pickle.load(f)
        f.close()
    else:
        #NearestND interpolation (fast)
        #print("Running NearestND interpolation for %i points" % X.size)
        #NearNDint = scipy.interpolate.NearestNDInterpolator(pts, VM, rescale=True)
        #LinearND interpolation
        print("Running LinearND interpolation for %i points" % X.size)
        #Note: this breaks qhull for lots of input points
        linNDint = scipy.interpolate.LinearNDInterpolator(pts, VM, rescale=False)
        print("Saving pickled interpolation function: %s" % int_fn)
        f = open(int_fn, 'wb')
        pickle.dump(linNDint, f, protocol=2)
        f.close()

    vmi_fn = '%s_%iday.npy' % (os.path.splitext(int_fn)[0], ti_dt)
    if load_existing and os.path.exists(vmi_fn):
        print('Loading existing interpolated stack: %s' % vmi_fn)
        #np.save wrote a .npy, so np.load returns the array directly
        vmi_ma = np.ma.fix_invalid(np.load(vmi_fn))
    else:
        #Once tessellation is complete, sample each timestep in parallel
        print("Sampling %i points at %i timesteps, %i total" % (x.size, ti.size, x.size * ti.size))
        #Prepare array to hold output
        vmi_ma = np.ma.masked_all((ti.size, test.shape[1], test.shape[2]))
        """
        #This does all points at once
        #vmi = linNDint(ptsi)
        #vmi_ma[:,y,x] = np.ma.fix_invalid(vmi.reshape((ti.size, x.shape[0])))
        #This does interpolation serially by timestep
        for n, i in enumerate(ti):
            print(n, i, timelib.o2dt(i))
            vmi_ma[n,y,x] = linNDint(x, y, i.repeat(x.size)).T
        """
        #Parallel processing
        pool = mp.Pool(processes=None)
        results = [pool.apply_async(dto_interp, args=(linNDint, xn, yn, i)) for i in tin]
        results = [p.get() for p in results]
        results.sort()
        for n, r in enumerate(results):
            t_rescale = r[0] * t_scale + t_offset
            print(n, t_rescale, timelib.o2dt(t_rescale))
            vmi_ma[n, y, x] = r[1]
        vmi_ma = np.ma.fix_invalid(vmi_ma)
        print('Saving interpolated stack: %s' % vmi_fn)
        np.save(vmi_fn, vmi_ma.filled(np.nan))

    origt = False
    if origt:
        print("Sampling %i points at %i original timesteps" % (x.size, t.size))
        vmi_ma_origt = np.ma.masked_all((t.size, test.shape[1], test.shape[2]))
        #Parallel
        pool = mp.Pool(processes=None)
        results = [pool.apply_async(dto_interp, args=(linNDint, x, y, i)) for i in t]
        results = [p.get() for p in results]
        results.sort()
        for n, r in enumerate(results):
            print(n, r[0], timelib.o2dt(r[0]))
            vmi_ma_origt[n, y, x] = r[1]
        vmi_ma_origt = np.ma.fix_invalid(vmi_ma_origt)
        #print('Saving interpolated stack: %s' % vmi_fn)
        #np.save(vmi_fn, vmi_ma.filled(np.nan))

    #Write out a proper stack, for use by stack_melt and flux gate mass budget
    if True:
        out_stack = deepcopy(stack)
        out_stack.stats = False
        out_stack.trend = False
        out_stack.datestack = False
        out_stack.write_stats = False
        out_stack.write_trend = False
        out_stack.write_datestack = False
        out_stack.ma_stack = vmi_ma
        out_stack.stack_fn = os.path.splitext(vmi_fn)[0] + '.npz'
        out_stack.date_list_o = np.ma.array(ti)
        out_stack.date_list = np.ma.array(timelib.o2dt(ti))
        out_fn_list = [timelib.print_dt(i) + '_LinearNDint.tif' for i in out_stack.date_list]
        out_stack.fn_list = out_fn_list
        out_stack.error = np.zeros_like(out_stack.date_list_o)
        out_stack.source = np.repeat('LinearNDint', ti.size)
        out_stack.gt = gt
        out_stack.res = res
        out_stack.savestack()

    sys.exit()

    """
    #Other interpolation methods
    #vmi = scipy.interpolate.griddata(pts, VM, ptsi, method='linear', rescale=True)

    #Kriging
    #Should explore this more - likely the best option
    #http://connor-johnson.com/2014/03/20/simple-kriging-in-python/
    #http://resources.esri.com/help/9.3/arcgisengine/java/gp_toolref/geoprocessing_with_3d_analyst/using_kriging_in_3d_analyst.htm
    #PyKrige does moving window Kriging, but only in 2D
    #https://github.com/bsmurphy/PyKrige/pull/5

    #Could do tiled kriging with overlap in parallel
    #Split along x and y direction, preserve all t
    #Need to generate semivariogram globally though, then pass to each tile
    #See malib sliding_window
    wx = wy = 30
    wz = test.shape[0]
    overlap = 0.5
    dwx = dwy = int(overlap*wx)
    gp_slices = malib.nanfill(test, malib.sliding_window, ws=(wz,wy,wx), ss=(0,dwy,dwx))

    vmi_gp_ma = np.ma.masked_all((ti.size, test.shape[1], test.shape[2]))
    vmi_gp_mse_ma = np.ma.masked_all((ti.size, test.shape[1], test.shape[2]))

    out = []
    for i in gp_slices:
        y, x = (i.count(axis=0) > 0).nonzero()
        x = x.astype(int)
        y = y.astype(int)
        vm_t = test[:,y,x]
        vm_t_flat = vm_t.ravel()
        idx = ~np.ma.getmaskarray(vm_t_flat)
        #These are values
        VM = vm_t_flat[idx]
        #These are coords
        X = np.tile(x, t.size)[idx]
        Y = np.tile(y, t.size)[idx]
        T = np.repeat(t, x.size)[idx]
        pts = np.vstack((X,Y,T)).T
        xi = np.tile(x, ti.size)
        yi = np.tile(y, ti.size)
        ptsi = np.array((xi, yi, ti.repeat(x.size))).T
        #gp = GaussianProcess(regr='linear', verbose=True, normalize=True, theta0=0.1, nugget=2)
        gp = GaussianProcess(regr='linear', verbose=True, normalize=True, nugget=2)
        gp.fit(pts, VM)
        vmi_gp, vmi_gp_mse = gp.predict(ptsi, eval_MSE=True)
        vmi_gp_ma = np.ma.masked_all((ti.size, i.shape[1], i.shape[2]))
        vmi_gp_ma[:,y,x] = np.array(vmi_gp.reshape((ti.size, x.shape[0])))
        vmi_gp_mse_ma = np.ma.masked_all((ti.size, i.shape[1], i.shape[2]))
        vmi_gp_mse_ma[:,y,x] = np.array(vmi_gp_mse.reshape((ti.size, x.shape[0])))
        out.append(vmi_gp_ma)
    #Now combine intelligently

    print("Gaussian Process regression")
    pts2d_vm = vm_t[1]
    pts2d = np.vstack((x,y))[~(np.ma.getmaskarray(pts2d_vm))].T
    pts2di = np.vstack((x,y)).T
    gp = GaussianProcess(regr='linear', verbose=True, normalize=True, theta0=0.1, nugget=1)
    gp.fit(pts, VM)
    print("Gaussian Process prediction")
    vmi_gp, vmi_gp_mse = gp.predict(ptsi, eval_MSE=True)
    print("Converting to stack")
    vmi_gp_ma = np.ma.masked_all((ti.size, test.shape[1], test.shape[2]))
    vmi_gp_ma[:,y,x] = np.array(vmi_gp.reshape((ti.size, x.shape[0])))
    vmi_gp_mse_ma = np.ma.masked_all((ti.size, test.shape[1], test.shape[2]))
    vmi_gp_mse_ma[:,y,x] = np.array(vmi_gp_mse.reshape((ti.size, x.shape[0])))
    sigma = np.sqrt(vmi_gp_mse_ma)
    """
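#rangenorm() and dto_interp() are called above but not defined in this excerpt.
#Minimal sketches consistent with the call sites (the actual definitions may differ):
def rangenorm(x, offset, scale):
    #Normalize coordinates so the scaled (x, y, t) cube is well-conditioned for qhull;
    #the inverse (xn*scale + offset) matches the t_rescale computation above
    return (x - offset) / scale

def dto_interp(interp, xi, yi, dto):
    #Sample the interpolant at all (xi, yi) points for a single normalized time dto,
    #returning the time along with the values so asynchronous results can be sorted
    return (dto, interp(xi, yi, dto.repeat(xi.size)))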
    dt_start = None
    if args.dt_start is not None:
        dt_start = datetime.strptime(args.dt_start, '%Y%m%d')
    dt_end = None
    if args.dt_end is not None:
        dt_end = datetime.strptime(args.dt_end, '%Y%m%d')

    #Clean this up
    #Initialize so the final check below catches the case where no filter matched
    site_list = None
    if args.fn is not None:
        if os.path.exists(args.fn):
            fn = args.fn
            ds = gdal.Open(fn)
            site_list = site_filter_extent_ds(ds, pad=args.extent_pad)
    elif args.extent is not None:
        site_list = site_filter_extent(args.extent, pad=args.extent_pad)
    elif args.stack_fn is not None:
        #DEM stack, can be used to plot lines on SNOTEL time series
        stack = malib.DEMStack(stack_fn=args.stack_fn)
        dem_dt = stack.date_list
        ds = stack.get_ds()
        site_list = site_filter_extent_ds(ds, pad=args.extent_pad)
    else:
        sys.exit("Must provide valid raster filename or lat/lon extent")

    #sitename = 'baker'
    #site_list = [999, 909, 1011, 910]
    #sitename = 'gm'
    #site_list = [622, 682]

    if site_list is None:
        sys.exit("No valid sites identified")

    vlist = args.vlist
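#site_filter_extent_ds() is not shown in this excerpt. A minimal sketch of the
#presumed approach: get a lat/lon extent for the dataset with pygeotools geolib,
#then defer to site_filter_extent() (function and attribute details are assumptions):
def site_filter_extent_ds_sketch(ds, pad=None):
    #Dataset extent as [xmin, ymin, xmax, ymax], reprojected to WGS84 lat/lon
    extent = geolib.ds_extent(ds, t_srs=geolib.wgs_srs)
    return site_filter_extent(extent, pad=pad)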