def transform(in_proj='EPSG:3011', out_proj='EPSG:4326', lat=0.0, lon=0.0):
    """
    Transform coordinates from one spatial reference system to another.

    in_proj is your current reference system.
    out_proj is the reference system you want to transform to; the default is
    EPSG:4326 (WGS84). Another good choice is EPSG:4258 (ETRS89, Europe),
    which is almost the same as WGS84 in Europe; it is not always clear
    whether coordinates are in WGS84 or ETRS89, but the two differ by <1 m.
    lat = latitude
    lon = longitude

    To find your EPSG code, check http://spatialreference.org/ref/epsg/
    """
    import mpl_toolkits.basemap.pyproj as pyproj
    o_proj = pyproj.Proj("+init=" + out_proj)
    i_proj = pyproj.Proj("+init=" + in_proj)
    if isinstance(lat, list):
        if len(lat) != len(lon):
            print(u'Length of latitude differs from length of longitude! '
                  u'When providing lists of coordinates they must have the same length.')
            x, y = None, None
        else:
            x = []
            y = []
            for i in range(len(lat)):
                x_i, y_i = pyproj.transform(i_proj, o_proj,
                                            float(lon[i]), float(lat[i]))
                x.append(x_i)
                y.append(y_i)
    else:
        x, y = pyproj.transform(i_proj, o_proj, float(lon), float(lat))
    return y, x
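# A minimal usage sketch for transform() above (not from the original source);
# the SWEREF99 18 00 (EPSG:3011) example coordinates are illustrative only.
lat_wgs, lon_wgs = transform(in_proj='EPSG:3011', out_proj='EPSG:4326',
                             lat=6583052.0, lon=152964.0)

# Lists of coordinates are transformed element by element and must have the
# same length.
lats, lons = transform(in_proj='EPSG:3011', out_proj='EPSG:4326',
                       lat=[6583052.0, 6584000.0], lon=[152964.0, 153500.0])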
def pixel_cords(y, x):  # may need to take time and LS scene numbers
    """
    Takes the y and x values and calculates the bounds of a pixel.
    Returns the coords of the top left and bottom right of the pixel.
    """
    wgs84 = pyproj.Proj("+init=EPSG:4326")  # projection of the GIMMS AUS dataset
    filein = glob.glob("%s/*090_079*.tif" % (LS_workingpath))
    filename = filein[0]
    # Top left corner of the GIMMS pixel.
    cy = -10 - (y * 1 / 12)
    cx = 112 + (x * 1 / 12)
    # Bottom right (calculated using the top left of the next diagonal pixel).
    cby = -10 - ((y + 1) * 1 / 12)
    cbx = 112 + ((x + 1) * 1 / 12)
    print(cy, cx, cby, cbx)

    with rasterio.drivers():
        with rasterio.open(filename, 'r') as src:
            im, = src.read()
            print(src.crs)
            LS_CORDS = pyproj.Proj(src.crs)
            print(wgs84(cy, cx))
            ulx, uly = pyproj.transform(wgs84, LS_CORDS, cx, cy)
            lrx, lry = pyproj.transform(wgs84, LS_CORDS, cbx, cby)
            print('top left:', uly, ulx, 'bottom right:', lry, lrx)
            print(src.bounds)

    # gdal_translate -projwin ulx uly lrx lry
    n = 0
    for fn in filein:
        print(n)
        layer = ['BS', 'NPV', 'PV', 'UE']
        print(fn)
        outname = "%s/LS_%s_crop.tif" % (LS_workingpath, layer[n])
        # Crop each input file to the pixel window.
        sub.call(['gdal_translate -projwin %d %d %d %d %s %s'
                  % (ulx, uly, lrx, lry, fn, outname)], shell=True)
        n += 1
def contains(self, lat, lon):
    x, y = pyproj.transform(wgs84, pj_laea, lon, lat)
    point = Point(x, y)
    return self.polygon.contains(point)
def reproj_wgs84(self):
    # In basemap, the sinusoidal projection is global, so we won't use it.
    # Instead we'll convert the grid back to lat/lons.
    sinu = pyproj.Proj("+proj=sinu +R=6371007.181000 +nadgrids=@null +wktext")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    self.Lon, self.Lat = pyproj.transform(sinu, wgs84, self.xv, self.yv)
def coords2mn(self, grid, station_names, station_x, station_y,
              grid_epsg=4326, station_epsg=4326):
    '''Calculate the nearest m, n indices on a grid for an array of type
    ['name', x, y], where x and y are coordinates (longitude/latitude,
    easting/northing, etc.). If the two are in different coordinate systems,
    the array is converted to the grid coordinate system (using EPSG codes;
    the default is 4326 = WGS84).'''
    def find_nearest(grid, query):
        m = np.unravel_index(
            np.abs(grid.y - query[1]).argmin(), np.shape(grid.y))[0]
        n = np.unravel_index(
            np.abs(grid.x - query[0]).argmin(), np.shape(grid.x))[1]
        return [m, n]

    grid_proj = pyproj.Proj("+init=EPSG:%i" % grid_epsg)
    station_proj = pyproj.Proj("+init=EPSG:%i" % station_epsg)
    if grid_epsg != station_epsg:
        station_x, station_y = pyproj.transform(station_proj, grid_proj,
                                                station_x, station_y)
    obs_idx = [
        find_nearest(grid, [station_x[i], station_y[i]])
        for i in range(0, np.size(station_names) - 1)
    ]
    self.names = station_names
    self.m = [i[0] for i in obs_idx]
    self.n = [i[1] for i in obs_idx]
    self.num_obs = np.shape(obs_idx)[0]
def wgs84_to_osgb36(long, lat):
    '''
    Convert coordinates from WGS84 to OSGB36.
    Takes (long, lat) and returns (x, y).
    '''
    (x, y) = pyproj.transform(wgs84, osgb36, long, lat)
    return (x, y)
def bng2latlon(eastings, northings):
    """ Converts British national grid coordinates to lat/lon. """
    bng = pyproj.Proj(init='epsg:27700')
    wgs84 = pyproj.Proj(init='epsg:4326')
    lon, lat = pyproj.transform(bng, wgs84, eastings, northings)
    return lon, lat
def latlon2bng(lon, lat):
    """ Converts lon/lat to British national grid coords. """
    bng = pyproj.Proj(init='epsg:27700')
    wgs84 = pyproj.Proj(init='epsg:4326')
    easting, northing = pyproj.transform(wgs84, bng, lon, lat)
    easting = round(easting, 0)
    northing = round(northing, 0)
    return easting, northing
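# Usage sketch (not from the original source). bng2latlon/latlon2bng above are
# self-contained, while wgs84_to_osgb36 relies on module-level Proj objects
# that the source does not show; plausible definitions are assumed below.
import pyproj

wgs84 = pyproj.Proj(init='epsg:4326')
osgb36 = pyproj.Proj(init='epsg:27700')

# Round trip with illustrative coordinates near central London.
lon, lat = bng2latlon(531025, 180189)
easting, northing = latlon2bng(lon, lat)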
def getCoord(dataframe):
    df = dataframe.copy()
    WGS84 = pyproj.Proj(init='epsg:4326')
    Lambert2 = pyproj.Proj(init='epsg:27572')
    for i in np.arange(len(df)):
        # .ix and .set_value were removed from pandas; use .iloc and .at.
        x = df.iloc[i].X_COORDINATE
        y = df.iloc[i].Y_COORDINATE
        lons, lats = pyproj.transform(Lambert2, WGS84, x, y)
        df.at[i, 'LONGITUDE'] = lons
        df.at[i, 'LATITUDE'] = lats
    return df
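# Usage sketch for getCoord() above (not from the original source); the
# Lambert II (EPSG:27572) coordinates below are made up for illustration.
import numpy as np
import pandas as pd
import pyproj

df_in = pd.DataFrame({'X_COORDINATE': [600000.0, 601000.0],
                      'Y_COORDINATE': [2427000.0, 2428000.0]})
df_out = getCoord(df_in)  # adds WGS84 LONGITUDE / LATITUDE columns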
def flowline_latlon(coords,
                    fromproj=pyproj.Proj("+init=epsg:3413"),
                    toproj=pyproj.Proj("+init=EPSG:4326")):
    """Convert coords into lat/lon so that Basemap can convert them back for
    plotting (don't know why this is necessary, but it works).

    Defaults:
        fromproj = NSIDC Polar Stereographic North
        toproj = WGS84 lat-lon
    """
    xs = coords[:, 0]
    ys = coords[:, 1]
    x_lon, y_lat = pyproj.transform(fromproj, toproj, xs, ys)
    # zip returns an iterator in Python 3, so materialize it for NumPy.
    latlon_coords = np.asarray(list(zip(x_lon, y_lat)))
    return latlon_coords
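# Usage sketch for flowline_latlon() above (not from the original source);
# the EPSG:3413 coordinates are illustrative points in metres.
import numpy as np

coords = np.array([[-200000.0, -2200000.0],
                   [-199000.0, -2201000.0]])
latlon = flowline_latlon(coords)  # N x 2 array of (lon, lat) pairs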
def extract_coord_val(self, lon_sel, lat_sel):
    # Convert requested lat/lon to sinusoidal coords.
    sinu = pyproj.Proj("+proj=sinu +R=6371007.181000 +nadgrids=@null +wktext")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon_sel_sinu, lat_sel_sinu = pyproj.transform(wgs84, sinu,
                                                  lon_sel, lat_sel)
    # Extract the raster value at the given coordinates.
    y_sel = int((lat_sel_sinu - self.originY) / self.pixelHeight)
    x_sel = int((lon_sel_sinu - self.originX) / self.pixelWidth)
    return self.data[y_sel, x_sel]
def my_project(in_east, in_north, direction):
    '''
    Sample user-defined projection and inverse projection between
    (lon, lat) and (x, y).

    DESCRIPTION:
        Define projections between geographical and Euclidean coordinates.

    INPUT:
        in_east   = 1D vector containing longitude (forward) or x (inverse)
        in_north  = 1D vector containing latitude (forward) or y (inverse)
        direction = 'forward' or 'inverse' (anything other than 'forward'
                    is treated as inverse)

    OUTPUT:
        (x, y) or (lon, lat) depending on the direction of the projection.

    EXAMPLE USAGE:
        lon, lat = my_project(x, y, 'inverse')
    '''
    from mpl_toolkits.basemap import pyproj

    state_plane = pyproj.Proj(r'+proj=tmerc +lat_0=42d50 +lon_0=-70d10 '
                              r'+k=0.9999666666666667 +x_0=900000 +y_0=0 '
                              r'+ellps=GRS80 +units=m +no_defs')
    wgs = pyproj.Proj(proj='latlong', datum='WGS84', ellps='WGS84')

    if direction == 'forward':
        lon = in_east
        lat = in_north
        x, y = pyproj.transform(wgs, state_plane, lon, lat)
        return x, y
    else:
        x = in_east
        y = in_north
        lon, lat = pyproj.transform(state_plane, wgs, x, y)
        return lon, lat
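# Usage sketch for my_project() above (not from the original source); the
# coordinates are illustrative.
x, y = my_project(-70.9, 42.3, 'forward')   # lon/lat -> state plane (m)
lon, lat = my_project(x, y, 'inverse')      # and back again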
def transformWrfProj(curs, domainNum, wrfLong, wrfLat, proj='epsg:4326'):
    """Uses pyproj to transform from WRF Lambert Conformal Conic to a new
    projection.

    Args:
        curs: WinDB2 cursor
        domainNum: WinDB2 domain number
        wrfLong: 1D or 2D Numpy array of WRF longitudes
        wrfLat: 1D or 2D Numpy array of WRF latitudes
        proj: Defaults to WGS84; use a legal pyproj projection string to
            change, e.g. proj='epsg:4326'

    Returns:
        Reprojected long and lat arrays of the same dimension as the input
        data.
    """
    import numpy as np
    import mpl_toolkits.basemap.pyproj as pyproj

    # Temporarily convert to 1D if we've been passed a grid.
    if len(wrfLong.shape) == 2 and len(wrfLat.shape) == 2:
        twoDToOneD = True
        wrfLongShape = wrfLong.shape
        wrfLong = wrfLong.ravel()
        wrfLatShape = wrfLat.shape
        wrfLat = wrfLat.ravel()
    else:
        twoDToOneD = False

    # Get the SRID of the WRF domain.
    sql = """SELECT proj4text FROM spatial_ref_sys
             WHERE srid=(SELECT st_srid(geom) FROM horizgeom
                         WHERE domainkey=""" + str(domainNum) + """ LIMIT 1)"""
    curs.execute(sql)
    wrfProj4Str = curs.fetchone()[0]

    # Create the WRF projection.
    wrfProj4 = pyproj.Proj(wrfProj4Str)

    # Transform from WRF to the requested projection (WGS84 by default).
    wrfWgs84Lon, wrfWgs84Lat = pyproj.transform(wrfProj4,
                                                pyproj.Proj(init=proj),
                                                wrfLong, wrfLat)

    # Convert back to 2D if necessary.
    if twoDToOneD:
        wrfWgs84Lon = np.reshape(wrfWgs84Lon, wrfLongShape)
        wrfWgs84Lat = np.reshape(wrfWgs84Lat, wrfLatShape)

    return wrfWgs84Lon, wrfWgs84Lat
def radiuspois(name, radius):
    tmptags = []
    lat = float(name[0])
    lon = float(name[1])
    if str(lat)[::-1].find('.') > 5 or str(lon)[::-1].find('.') > 5:
        print(lat, lon)
    nodesnum = 0
    nodeids = []
    lonswgs84, latswgs84 = pyproj.transform(wgs84, osm3857, lon, lat)
    p = Point(latswgs84, lonswgs84)
    p = p.buffer(radius)
    poly = Polygon(p.exterior)
    result = queryaround(radius, lat, lon)
    if result != 0 and result.nodes != []:
        for j in result.nodes:
            tmptags.append(j.tags)
        nodeids.append(result.node_ids)
        nodesnum += len(result.nodes)
    atomic_operation(poly, str((lat, lon)), tmptags, nodesnum, nodeids)
    return 0
def inside_network(self, epi_lats, epi_lons):
    """
    Return epicenter coordinates located inside a seismic station network.
    The point-in-polygon problem is solved with the ray casting method.

    :param epi_lats: Latitudes of earthquake epicenters.
    :param epi_lons: Longitudes of earthquake epicenters.
    :type epi_lats: numpy.array, list/tuple or scalar
    :type epi_lons: numpy.array, list/tuple or scalar
    :returns: Epicenter coordinates located within the network. The first
        and second columns are latitude and longitude, respectively.
    :rtype: numpy.array
    """
    epi_x, epi_y = pyproj.transform(wgs84, pj_laea, epi_lons, epi_lats)
    r = []
    for i, (x, y) in enumerate(zip(epi_x, epi_y)):
        epicenter = Point(x, y)
        if epicenter.within(self.polygon):
            r.append((epi_lats[i], epi_lons[i]))
    return np.array(r)
def run(FILE_NAME):

    DATAFIELD_NAME = 'sur_refl_b01_1'

    if USE_NETCDF:

        from netCDF4 import Dataset

        # The scaling equation isn't what netcdf4 expects, so turn it off.
        nc = Dataset(FILE_NAME)
        ncvar = nc.variables[DATAFIELD_NAME]
        ncvar.set_auto_maskandscale(False)
        data = ncvar[:].astype(np.float64)

        # Get any needed attributes.
        scale_factor = ncvar.scale_factor
        add_offset = ncvar.add_offset
        _FillValue = ncvar._FillValue
        valid_range = ncvar.valid_range
        units = ncvar.units
        long_name = ncvar.long_name

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        gridmeta = getattr(nc, 'StructMetadata.0')
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))

        nx, ny = data.shape
        x = np.linspace(x0, x1, nx)
        y = np.linspace(y0, y1, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

    elif USE_GDAL:

        # GDAL
        import gdal

        GRID_NAME = 'MODIS_Grid_2D'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Get any needed attributes.
        meta = gdset.GetMetadata()
        scale_factor = float(meta['scale_factor'])
        add_offset = float(meta['add_offset'])
        _FillValue = float(meta['_FillValue'])
        valid_range = [float(x) for x in meta['valid_range'].split(', ')]
        units = meta['units']
        long_name = meta['long_name']

        # Construct the grid.
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        x = np.linspace(x0, x0 + xinc * nx, nx)
        y = np.linspace(y0, y0 + yinc * ny, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

        del gdset

    else:

        # PyHDF
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.double)

        # Read geolocation dataset from HDF-EOS2 dumper output.
        GEO_FILE_NAME = 'lat_MYD09GQ.A2012246.h35v10.005.2012248075505.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lat = lat.reshape(data.shape)

        GEO_FILE_NAME = 'lon_MYD09GQ.A2012246.h35v10.005.2012248075505.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lon = lon.reshape(data.shape)

        # Read attributes.
        attrs = data2D.attributes(full=1)
        lna = attrs["long_name"]
        long_name = lna[0]
        vra = attrs["valid_range"]
        valid_range = vra[0]
        fva = attrs["_FillValue"]
        _FillValue = fva[0]
        sfa = attrs["scale_factor"]
        scale_factor = sfa[0]
        ua = attrs["units"]
        units = ua[0]
        aoa = attrs["add_offset"]
        add_offset = aoa[0]

    # Apply the attributes to the data.
    invalid = np.logical_or(data < valid_range[0], data > valid_range[1])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = (data - add_offset) / scale_factor
    data = np.ma.masked_array(data, np.isnan(data))

    # There is a wrap-around issue to deal with, as some of the grid extends
    # eastward over the international dateline. Adjust the longitude to avoid
    # a smearing effect.
    lon[lon < 0] += 360

    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=-22.5, urcrnrlat=-7.5,
                llcrnrlon=167.5, urcrnrlon=192.5)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-20, -5, 5), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(170, 200, 10), labels=[0, 0, 0, 1])

    # The full grid is too big for plotting (nothing will show), so
    # subsample it by a factor of two.
    # m.pcolormesh(lon, lat, data, latlon=True)
    m.pcolormesh(lon[::2, ::2], lat[::2, ::2], data[::2, ::2], latlon=True)

    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def loopTheCode(loopNumber):

    # DATA SETS NEEDED
    # Give it bounds to work within (subsample of the all-Greenland TIFF),
    # e.g. N = 72.5, S = 66.85, E = -26.0, W = -39.0
    # outletLat = 72.827, outletLon = -54.309  # Rink

    outletLatList = [[77.498], [77.664], [74.399], [71.725], [68.592],
                     [66.342], [64.234], [72.808], [71.781], [71.47],
                     [70.368], [69.173]]
    outletLonList = [[-65.216], [-66.045], [-56.099], [-52.417], [-32.943],
                     [-38.036], [-49.524], [-54.118], [-51.463], [-51.446],
                     [-50.616], [-49.730]]
    glacierNameList = [["Heilprin"], ["Tracy"], ["Alison"], ["Umiamako"],
                       ["KangerD"], ["Sermilik"], ["KNS"], ["Upernavik"],
                       ["Rink"], ["KS"], ["Store"], ["Jacobshavn"]]

    # There is a better way to do this.
    outletLatList = outletLatList[loopNumber]
    outletLonList = outletLonList[loopNumber]
    glacierNameList = glacierNameList[loopNumber]

    fileProcessingFolderList = [
        "W:\\MORLIGHEM_NSIDC\\ThuleArea\\",
        "W:\\MORLIGHEM_NSIDC\\ThuleArea\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\KangerD\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\d8_tri_landIce\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
        "W:\\MORLIGHEM_NSIDC\\NW_greenland\\",
    ]
    fileProcessingFolder = fileProcessingFolderList[loopNumber]

    # errbed_tiff = 'W:\\MORLIGHEM_NSIDC\\temp_errbed_Layer.tif'
    mask_tiff = 'W:\\MORLIGHEM_NSIDC\\temp_mask_Layer.tif'
    # In the mask array 0 is ocean, 1 is land, and 2 is ice; tell it whether
    # you want ice, land + ice, etc.
    mask_numpy = arcpy.RasterToNumPyArray(mask_tiff, nodata_to_value=0)

    # All values that are ice or land are set to one.
    mask_numpy[mask_numpy >= 1] = 1

    # Find the first basin file and use it to size the numpy arrays.
    ticker = 0
    ticker2 = 0
    for files in os.listdir(fileProcessingFolder):
        if files.endswith("_basin.tif"):
            ticker = ticker + 1
            print(files)
            guideFile = arcpy.RasterToNumPyArray(fileProcessingFolder + files)
            if ticker == 1:
                break

    basinRawSum = np.zeros(np.shape(guideFile))
    basinRawSum_loop = np.zeros(np.shape(guideFile))

    for locIndex in range(len(outletLatList)):
        ptList = []
        outletLat = outletLatList[locIndex]
        outletLon = outletLonList[locIndex]
        glacierName = glacierNameList[locIndex]
        print(glacierName)
        print(locIndex)
        print(outletLat)
        print(outletLon)

        outletLon_PS, outletLat_PS = pyproj.transform(WGS84, PS_north,
                                                      outletLon, outletLat)
        print("At start of script, location is:", outletLon_PS, outletLat_PS)

        # There can only be one set of basin runs in the folder.
        for files in os.listdir(fileProcessingFolder):
            if files.endswith("_basin.tif"):
                tiff_for_geoRef = files

                # basinID used to be hardwired, e.g.
                # basinID = basin_numpy[3729, 1330]  # nuuk
                # basinID = basin_numpy[1540, 1289]  # watson
                basin_numpy = arcpy.RasterToNumPyArray(fileProcessingFolder +
                                                       files)

                # This defines the search radius for each catchment. It is
                # then visually confirmed that this gives the right
                # catchments.
                for ri in range(-6000, 6001, 3000):
                    for rj in range(-6000, 6001, 3000):
                        outletLon_PS_range = outletLon_PS + ri
                        outletLat_PS_range = outletLat_PS + rj
                        ptList.append([outletLon_PS_range,
                                       outletLat_PS_range])

                        # Use the point(s) to get cell values.
                        basinID_resultObject = arcpy.GetCellValue_management(
                            fileProcessingFolder + files,
                            str(outletLon_PS_range) + " " +
                            str(outletLat_PS_range))
                        if str(basinID_resultObject.getOutput(0)) != 'NoData':
                            basinID = float(basinID_resultObject.getOutput(0))
                            print(basinID)
                            print(files)
                            ticker2 = ticker2 + 1
                            print(ticker2)

                            # Set the basin to 1 and accumulate it.
                            basinRawSum_loop[basin_numpy == basinID] = 1.0
                            basinRawSum = basinRawSum + basinRawSum_loop
                            basinRawSum_loop = np.zeros(np.shape(guideFile))

        pt = arcpy.Point()
        ptGeoms = []
        for p in ptList:
            pt.X = p[0]
            pt.Y = p[1]
            ptGeoms.append(arcpy.PointGeometry(pt))
        print("At the end of script, location is:", pt.X, pt.Y)

        # Create a shapefile of the points that were used to 'grab' each
        # catchment.
        arcpy.CopyFeatures_management(
            ptGeoms, fileProcessingFolder + glacierName + "outlet.shp")

    basinPercent = basinRawSum / np.max(basinRawSum)

    numpyarray = basinPercent
    filePrefix = glacierName
    originalTiff = fileProcessingFolder + tiff_for_geoRef

    arcpy.env.overwriteOutput = True
    arcpy.CheckOutExtension("Spatial")

    # Do min/max rasters.
    minRaster = np.zeros(np.shape(numpyarray))
    maxRaster = np.zeros(np.shape(numpyarray))
    maxRaster[numpyarray > 0.0] = 1
    minRaster[numpyarray > 0.95] = 1

    # Get info about the raster.
    r = arcpy.Raster(originalTiff)
    LL_corner_subset = r.extent.lowerLeft
    x_cell_size = r.meanCellWidth
    y_cell_size = r.meanCellHeight
    stringSpatialRef = r.spatialReference.exporttostring()

    # DUMP TO GEOTIFF
    # Percent -------------------------
    arcpy.env.overwriteOutput = True  # overwrite the old file when re-run
    outRaster1 = arcpy.NumPyArrayToRaster(
        basinPercent, LL_corner_subset, x_cell_size, y_cell_size)
    arcpy.DefineProjection_management(outRaster1, stringSpatialRef)
    outRaster1.save(fileProcessingFolder + filePrefix + "FINAL_V02.tif")

    # Min -------------------------
    outRaster2 = arcpy.NumPyArrayToRaster(
        minRaster, LL_corner_subset, x_cell_size, y_cell_size)
    arcpy.DefineProjection_management(outRaster2, stringSpatialRef)
    outRaster2.save(fileProcessingFolder + filePrefix + "min_V02.tif")
    arcpy.gp.ContourList_sa(
        fileProcessingFolder + filePrefix + "min_V02.tif",
        fileProcessingFolder + filePrefix + "min_V02.shp", "1")

    # Max -------------------------
    outRaster3 = arcpy.NumPyArrayToRaster(
        maxRaster, LL_corner_subset, x_cell_size, y_cell_size)
    arcpy.DefineProjection_management(outRaster3, stringSpatialRef)
    outRaster3.save(fileProcessingFolder + filePrefix + "max_V02.tif")
    arcpy.gp.ContourList_sa(
        fileProcessingFolder + filePrefix + "max_V02.tif",
        fileProcessingFolder + filePrefix + "max_V02.shp", "1")
fout = open('CoastLine.vtk', 'w')
nlines = 0
fout.write('# vtk DataFile Version 2.0\n'
           'parabola - polyline\n'
           'ASCII\n\n'
           'DATASET POLYDATA\n')
fout.write('POINTS %d float\n' % (len(vertices) + 1))
# Dummy point to avoid changing the numbering scheme.
fout.write('0 0 0\n')
for vert in vertices:
    if args.proj != '':
        latlon = [float(v) for v in vert[0:2]]
        xyz = pyproj.transform(lla, myproj, latlon[0], latlon[1], 0,
                               radians=False)
        fout.write('%e %e %e\n' % tuple(xyz))
    else:
        fout.write('%s %s 0.\n' % tuple(vert[0:2]))
fout.write('\nLINES %d %d\n' % (len(segments), 3 * len(segments)))
for seg in segments:
    fout.write('2 %s %s\n' % tuple(seg))
fout.write('\n')
fout.close()
print("CoastLine.vtk successfully created")
def get_dataset_wgs84(FILE_NAME, GRID_NAME, DATAFIELD_NAME, plot=False):
    '''
    FILE_NAME = name of the HDF-EOS file.
    The GRID_NAME and DATAFIELD_NAME parameters can both be found either by
    examining the .hdf file in HDFView or by running list_mod_datasets.
    '''
    import gdal

    gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                     GRID_NAME,
                                                     DATAFIELD_NAME)
    gdset = gdal.Open(gname)
    data = gdset.ReadAsArray().astype(np.float64)

    # Construct the grid.
    x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
    nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # In basemap, the sinusoidal projection is global, so we won't use it.
    # Instead we'll convert the grid back to lat/lons.
    sinu = pyproj.Proj("+proj=sinu +R=6371007.181000 +nadgrids=@null +wktext")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

    # Read the attributes.
    meta = gdset.GetMetadata()
    long_name = meta['long_name']
    units = meta['units']
    _FillValue = float(meta['_FillValue'])
    scale_factor = float(meta['scale_factor'])
    add_offset = float(meta['add_offset'])
    valid_range = [float(x) for x in meta['valid_range'].split(', ')]

    # Close the hdf file.
    del gdset

    invalid = np.logical_or(data > valid_range[1], data < valid_range[0])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = scale_factor * (data - add_offset)
    data = np.ma.masked_array(data, np.isnan(data))

    if plot:
        # Make a simple plot.
        m = Basemap(projection='cyl', resolution='h',
                    llcrnrlat=25.5, urcrnrlat=42.5,
                    llcrnrlon=-122.5, urcrnrlon=-95.5)
        m.drawcoastlines(linewidth=0.5)
        m.drawparallels(np.arange(-10, 5, 5), labels=[1, 0, 0, 0])
        m.drawmeridians(np.arange(-70, -55, 5), labels=[0, 0, 0, 1])
        m.pcolormesh(lon, lat, data, latlon=True)
        cb = m.colorbar()
        cb.set_label(units)

        basename = os.path.basename(FILE_NAME)
        plt.title('{0}\n{1}'.format(basename, long_name), fontsize=11)
        fig = plt.gcf()
        plt.show()

    return [data, lat, lon]
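# Usage sketch for get_dataset_wgs84() above (not from the original source);
# the file name is a hypothetical placeholder, while the grid and field names
# are taken from the NDVI example later in this collection.
data, lat, lon = get_dataset_wgs84('MOD13A1.A2007257.h09v05.005.hdf',
                                   'MODIS_Grid_16DAY_500m_VI',
                                   '500m 16 days NDVI', plot=False)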
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Extent'

    if USE_GDAL:
        import gdal

        GRID_NAME = 'Northern Hemisphere'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray()

        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)

        del gdset
    else:
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))

        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject into WGS84.
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)

    # Use a north polar azimuthal equal area projection.
    m = Basemap(projection='nplaea', resolution='l',
                boundinglat=40, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0, 90, 15), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 180, 45), labels=[0, 0, 0, 1])

    # Bin the data as follows:
    #   0       = snow-free land
    #   1-100   = sea ice concentration (in 20% bins, blue through cyan)
    #   101     = permanent ice
    #   103     = dry snow
    #   252     = mixed pixels at coastlines
    #   255     = ocean
    lst = ['#004400', '#0000ff', '#0044ff', '#0088ff', '#00ccff',
           '#00ffff', '#ffffff', '#440044', '#191919', '#000000',
           '#8888cc']
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 21, 41, 61, 81, 101, 103, 104, 252, 255]
    tickpts = [0.5, 11, 31, 51, 71, 91, 102, 103.5, 178, 253.5]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    # The corners cause trouble, so chop them out.
    idx = slice(5, 721)
    m.pcolormesh(lon[idx, idx], lat[idx, idx], data[idx, idx],
                 latlon=True, cmap=cmap, norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks(tickpts)
    color_bar.set_ticklabels(['snow-free\nland', '1-20% sea ice',
                              '21-40% sea ice', '41-60% sea ice',
                              '61-80% sea ice', '81-100% sea ice',
                              'permanent\nice', 'dry\nsnow',
                              'mixed pixels\nat coastlines', 'ocean'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = "SI_06km_NH_89V_DAY"

    if USE_GDAL:
        import gdal

        GRID_NAME = "NpPolarGrid06km"
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Read projection parameters from the global attribute.
        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)

        del gdset
    else:
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r"""UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)""", re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group("upper_left_x"))
        y0 = float(match.group("upper_left_y"))

        lr_regex = re.compile(r"""LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)""", re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group("lower_right_x"))
        y1 = float(match.group("lower_right_y"))

        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    # Apply the attributes information.
    # Ref: http://nsidc.org/data/docs/daac/ae_si6_6km_tbs.gd.html#2
    data[data == 0] = np.nan
    data *= 0.1
    data = np.ma.masked_array(data, np.isnan(data))

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    args = ["+proj=stere", "+lat_0=90", "+lon_0=-45", "+lat_ts=70",
            "+k=1", "+es=0.006693883", "+a=6378273", "+x_0=0", "+y_0=0",
            "+ellps=WGS84", "+datum=WGS84"]
    pstereo = pyproj.Proj(" ".join(args))
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(pstereo, wgs84, xv, yv)

    units = "K"
    long_name = DATAFIELD_NAME

    m = Basemap(projection="npstere", resolution="l",
                boundinglat=30, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0, 91, 20), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 181, 45), labels=[0, 0, 0, 1])
    m.pcolormesh(lon, lat, data, latlon=True)

    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title("{0}\n{1}".format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Sea_Ice_by_Reflectance'

    if USE_GDAL:
        import gdal

        GRID_NAME = 'MOD_Grid_Seaice_1km'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray()

        # Construct the grid.
        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)

        del gdset
    else:
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))

        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject the coordinates out of lamaz into lat/lon.
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)

    # Draw a lambert equal area azimuthal basemap.
    m = Basemap(projection='laea', resolution='l', lat_ts=50,
                lat_0=50, lon_0=150,
                width=2500000, height=2500000)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(50, 91, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(110, 181, 10), labels=[0, 0, 0, 1])

    # Use a discretized colormap since we have only a few levels:
    #   0   = missing data
    #   1   = no decision
    #   11  = night
    #   25  = land
    #   37  = inland water
    #   39  = ocean
    #   50  = cloud
    #   200 = sea ice
    #   253 = no input tile expected
    #   254 = non-production mask
    #   255 = fill
    lst = ['#727272', '#b7b7b7', '#ffff96', '#00ff00', '#232375',
           '#232375', '#63c6ff', '#ff0000', '#3f3f3f', '#000000',
           '#000000']
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 11, 25, 37, 39, 50, 200, 253, 254, 255, 256]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
    m.pcolormesh(lon, lat, data, latlon=True, cmap=cmap, norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks([0.5, 5.5, 18, 31, 38, 44.5, 125, 226.5,
                         253.5, 254.5, 255.5])
    color_bar.set_ticklabels(['missing', 'no decision', 'night', 'land',
                              'inland water', 'ocean', 'cloud', 'sea ice',
                              'no input tile\nexpected',
                              'non-production\nmask', 'fill'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    DATAFIELD_NAME = "sur_refl_b02"

    if USE_NETCDF:

        from netCDF4 import Dataset

        # The scaling equation isn't what netcdf4 expects, so turn it off.
        nc = Dataset(FILE_NAME)
        ncvar = nc.variables[DATAFIELD_NAME]
        ncvar.set_auto_maskandscale(False)
        data = ncvar[:].astype(np.float64)

        # Get any needed attributes.
        scale_factor = ncvar.scale_factor
        add_offset = ncvar.add_offset
        _FillValue = ncvar._FillValue
        valid_range = ncvar.valid_range
        units = ncvar.units
        long_name = ncvar.long_name

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        gridmeta = getattr(nc, "StructMetadata.0")
        ul_regex = re.compile(r"""UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)""", re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group("upper_left_x"))
        y0 = float(match.group("upper_left_y"))

        lr_regex = re.compile(r"""LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)""", re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group("lower_right_x"))
        y1 = float(match.group("lower_right_y"))

        nx, ny = data.shape
        x = np.linspace(x0, x1, nx)
        y = np.linspace(y0, y1, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

    elif USE_GDAL:

        # GDAL
        import gdal

        GRID_NAME = "MOD_Grid_500m_Surface_Reflectance"
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Get any needed attributes.
        meta = gdset.GetMetadata()
        scale_factor = float(meta["scale_factor"])
        add_offset = float(meta["add_offset"])
        _FillValue = float(meta["_FillValue"])
        valid_range = [float(x) for x in meta["valid_range"].split(", ")]
        units = meta["units"]
        long_name = meta["long_name"]

        # Construct the grid.
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        x = np.linspace(x0, x0 + xinc * nx, nx)
        y = np.linspace(y0, y0 + yinc * ny, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

        del gdset

    else:

        # PyHDF
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.double)

        # Read geolocation dataset from HDF-EOS2 dumper output.
        GEO_FILE_NAME = "lat_MYD09A1.A2007273.h03v07.005.2007285103507.output"
        GEO_FILE_NAME = os.path.join(os.environ["HDFEOS_ZOO_DIR"],
                                     GEO_FILE_NAME)
        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=",", usecols=[0])
        lat = lat.reshape(data.shape)

        GEO_FILE_NAME = "lon_MYD09A1.A2007273.h03v07.005.2007285103507.output"
        GEO_FILE_NAME = os.path.join(os.environ["HDFEOS_ZOO_DIR"],
                                     GEO_FILE_NAME)
        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=",", usecols=[0])
        lon = lon.reshape(data.shape)

        # Read attributes.
        attrs = data2D.attributes(full=1)
        lna = attrs["long_name"]
        long_name = lna[0]
        vra = attrs["valid_range"]
        valid_range = vra[0]
        fva = attrs["_FillValue"]
        _FillValue = fva[0]
        sfa = attrs["scale_factor"]
        scale_factor = sfa[0]
        ua = attrs["units"]
        units = ua[0]
        aoa = attrs["add_offset"]
        add_offset = aoa[0]

    # Apply the attributes to the data.
    invalid = np.logical_or(data < valid_range[0], data > valid_range[1])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = (data - add_offset) * scale_factor
    data = np.ma.masked_array(data, np.isnan(data))

    m = Basemap(projection="cyl", resolution="l",
                llcrnrlat=7.5, urcrnrlat=22.5,
                llcrnrlon=-162.5, urcrnrlon=-137.5)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(5, 25, 5), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-170, -130, 10), labels=[0, 0, 0, 1])
    m.pcolormesh(lon, lat, data, latlon=True)

    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title("{0}\n{1}".format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def transformLambert(coords):
    # Python 3 dropped tuple parameter unpacking, so unpack explicitly.
    lats, lons = coords
    WGS84 = pyproj.Proj(init='epsg:4326')
    Lambert2 = pyproj.Proj(init='epsg:27572')
    x, y = pyproj.transform(WGS84, Lambert2, lons, lats)
    return (x, y)
def on_go(datafile, calbfile, dirname, calb_interp, linear_interp, despike,
          x_var, y_var, intsp_x, intsp_y, arcgrdval, data_match, buff, shape,
          gisgrd, surf, geopng, topo, blank, utm, wgs):
    # Variables related to the direction of data collection:
    # negative_gradient = True
    # x_var = (int(x_var)) * 2  # average measurement spacing 0.5
    # y_var = (int(y_var)) * 2  # average traverse spacing 0.25

    filename = os.path.basename(datafile)
    if not os.path.exists(dirname):
        os.makedirs(dirname, mode=511)

    interp_dir = 'Linear' if linear_interp.isChecked() else 'Cubic'

    def make_output_dir(subfolder, msg, enabled=True):
        """Create an output subfolder if needed; return its path or False."""
        if not enabled:
            return False
        path = dirname + '/' + interp_dir + '/' + subfolder
        if not os.path.exists(path):
            os.makedirs(path, mode=511)
            print(msg)
            flusher()
        return path

    csv_path = make_output_dir('CSV_output', 'CSV folder created')
    output_name = csv_path + '/' + filename[0:-4] + '_calibrated.csv'
    raw_shape_path = make_output_dir('shapefiles',
                                     '\nShapefile folder created', shape)
    topo_path = make_output_dir('topo', '\nTopo folder created', topo)
    raw_img_path = make_output_dir('raw_images',
                                   '\nRaw image folder created', geopng)
    corr_img_path = make_output_dir('corr_images',
                                    '\nCalibrated image folder created',
                                    geopng)
    raw_gis_path = make_output_dir('raw_gis_grids',
                                   '\nRaw GIS grid folder created', gisgrd)
    corr_gis_path = make_output_dir('corr_gis_grids',
                                    '\nCalibrated GIS grid folder created',
                                    gisgrd)
    raw_surf_path = make_output_dir('raw_surfer_grids',
                                    '\nRaw surfer grid folder created', surf)
    corr_surf_path = make_output_dir('corr_surfer_grids',
                                     '\nCalibrated surfer grid folder created',
                                     surf)

    # Load data from datafile into the data array.
    if utm:
        data = np.loadtxt(datafile, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))
    elif wgs:
        data = np.loadtxt(datafile, dtype=object, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))
        data = rawtodd(data)
    else:
        data = np.loadtxt(datafile, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))

    # Convert data to OS (OSGB36) for use in GIS.
    osgb36 = pyproj.Proj("+init=EPSG:27700")
    UTM30N = pyproj.Proj("+init=EPSG:32630")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    n_data = data[:, 0]
    e_data = data[:, 1]
    if utm:
        data_coord = np.array(pyproj.transform(UTM30N, osgb36,
                                               e_data, n_data)).T
        data = np.column_stack((data_coord, data[:, 2:9]))
        print('\nUTM data loaded')
        flusher()
    elif wgs:
        data_coord = np.array(pyproj.transform(wgs84, osgb36,
                                               e_data, n_data)).T
        data = np.column_stack((data_coord, data[:, 2:9]))
        print('\nWGS84 data loaded')
        flusher()
    else:
        data_coord = data[:, 0:2]
        print('\nOS data loaded')
        flusher()

    if not despike:
        print('\nData not despiked')
        flusher()
        if shape:
            raw_shape_out(data, dirname, filename, raw_shape_path)
            print('\nShapes created')
            flusher()
        plot_data(filename, data, x_var, y_var, intsp_x, intsp_y, dirname,
                  arcgrdval, data_match, geopng, gisgrd, surf, topo, blank,
                  raw_img_path, raw_gis_path, topo_path, raw_surf_path)
        output_orig = csv_path + '/' + filename[0:-4] + '_OS_transform.csv'
        outputOS = open(output_orig, 'w')
        np.savetxt(outputOS, data, fmt='%8.3f', delimiter=',',
                   header='Eastings, Northings,Altitude,C1,I1,C2,I2,C3,I3',
                   comments='')
        print('\nOS transformed raw data CSV exported')
        flusher()

    # Load data from the calibration file into an array.
    if utm:
        calb = np.loadtxt(calbfile, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))
    elif wgs:
        calb = np.loadtxt(calbfile, dtype=object, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))
        calb = calbtodd(calb)
    else:
        calb = np.loadtxt(calbfile, skiprows=1,
                          usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8))

    # Convert the calibration to OS for use in GIS.
    n_calb = calb[:, 0]
    e_calb = calb[:, 1]
    if utm:
        calb_coord = np.array(pyproj.transform(UTM30N, osgb36,
                                               e_calb, n_calb)).T
        calb = np.column_stack((calb_coord, calb[:, 2:9]))
        print('\nUTM calibration loaded')
        flusher()
    elif wgs:
        calb_coord = np.array(pyproj.transform(wgs84, osgb36,
                                               e_calb, n_calb)).T
        calb = np.column_stack((calb_coord, calb[:, 2:9]))
        print('\nWGS84 calibration loaded')
        flusher()
    else:
        calb_coord = calb[:, 0:2]
        print('calb coord', calb_coord)
        print('\nOS calibration loaded')
        flusher()

    set_status_var('loaded into arrays')
    print('loaded into arrays')
    flusher()

    # Initiate spike removal for the calibration and data arrays.
    if despike:
        print('\nDespiking data')
        flusher()
        data = spike_removal(data[:, 0:3], data[:, 3:9], 2, 2)
        calb = spike_removal(calb[:, 0:3], calb[:, 3:9], 3, 2)
        print('\nDespike completed')
        flusher()
        if shape:
            raw_shape_out(data, dirname, filename, raw_shape_path)
            print('\nShapes created')
            flusher()
        plot_data(filename, data, x_var, y_var, intsp_x, intsp_y, dirname,
                  arcgrdval, data_match, geopng, gisgrd, surf, topo, blank,
                  raw_img_path, raw_gis_path, topo_path, raw_surf_path)

    # Arrays containing only coordinates.
    data_coord = np.array(data[:, 0:3])
    calb_coord = np.array(calb[:, 0:3])

    # Correct data against the drift calibration file.
    out_array = drift_calb(data_coord, data, calb_coord, calb, calb_interp,
                           buff, csv_path)
    print('\nCalibration completed')
    flusher()

    output = open(output_name, 'w')
    header = 'Eastings, Northings,Altitude,C1,I1,C2,I2,C3,I3'
    print(header, file=output)
    if shape:
        print('\nShapes created')
        flusher()
        corr_shape_out(out_array, dirname, filename, raw_shape_path)
    np.savetxt(output, out_array, fmt='%8.3f', delimiter=',')
    print('\nOS transformed calibrated data saved')
    flusher()

    topo = False
    plot_corr(out_array, filename, x_var, y_var, intsp_x, intsp_y, dirname,
              arcgrdval, geopng, gisgrd, surf, topo, blank, corr_img_path,
              corr_gis_path, topo_path, corr_surf_path)

    # The last thing that happens!
    plt.show()

##########
# Is any of the following doing anything???
'''
def __init__(self, net_lats, net_lons):
    poly_x, poly_y = pyproj.transform(wgs84, pj_laea, net_lons, net_lats)
    # zip returns an iterator in Python 3, so materialize it for shapely.
    self.polygon = MultiPoint(list(zip(poly_x, poly_y))).convex_hull
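# The network-polygon methods in this collection (__init__, contains,
# inside_network) all rely on module-level wgs84 and pj_laea Proj objects that
# the source does not show. A minimal sketch of plausible definitions; the
# laea parameters here are an assumption, not the original values.
import pyproj
from shapely.geometry import Point, MultiPoint

wgs84 = pyproj.Proj("+init=EPSG:4326")
pj_laea = pyproj.Proj("+proj=laea +lat_0=45 +lon_0=-100 +units=m")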
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Snow_Cover_Daily_Tile'

    if USE_GDAL:
        import gdal

        GRID_NAME = 'MOD_Grid_Snow_500m'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray()

        # Construct the grid.
        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)

        del gdset
    else:
        from pyhdf.SD import SD, SDC

        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))

        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # In basemap, the sinusoidal projection is global, so we won't use it.
    # Instead we'll convert the grid back to lat/lons.
    sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

    # There's a wraparound issue for the longitude, as part of the tile
    # extends over the international dateline, and pyproj wraps longitude
    # values west of 180W (< -180) into positive territory. Basemap's
    # pcolormesh method doesn't like that.
    lon[lon > 0] -= 360

    m = Basemap(projection='cyl', resolution='h', lon_0=-10,
                llcrnrlat=-5, urcrnrlat=30,
                llcrnrlon=-185, urcrnrlon=-150)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0, 21, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, -159, 10), labels=[0, 0, 0, 1])

    # Use a discretized colormap since we have only four levels:
    # missing, no snow, ocean, fill.
    cmap = mpl.colors.ListedColormap(['grey', 'green', 'blue', 'black'])
    bounds = [0, 25, 39, 255, 256]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    # 2400x2400 seems to be too much, so subset it.
    m.pcolormesh(lon[::2, ::2], lat[::2, ::2], data[::2, ::2],
                 latlon=True, cmap=cmap, norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks([12, 32, 147, 255.5])
    color_bar.set_ticklabels(['missing', 'no snow', 'ocean', 'fill'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = 'Snow Cover Tile'
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    DATAFIELD_NAME = '500m 16 days NDVI'

    if USE_GDAL:

        # GDAL
        import gdal

        GRID_NAME = 'MODIS_Grid_16DAY_500m_VI'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)

        # Subset the data by a factor of 4 so that low-memory machines can
        # render it more easily.
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)[::4, ::4]

        # Get any needed attributes.
        meta = gdset.GetMetadata()
        scale_factor = float(meta['scale_factor'])
        add_offset = float(meta['add_offset'])
        _FillValue = float(meta['_FillValue'])
        valid_range = [float(x) for x in meta['valid_range'].split(', ')]
        units = meta['units']
        long_name = meta['long_name']

        # Construct the grid, remembering to subset by a factor of 4.
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        x = np.linspace(x0, x0 + xinc * nx, nx)
        y = np.linspace(y0, y0 + yinc * ny, ny)
        xv, yv = np.meshgrid(x[::4], y[::4])

        del gdset

    else:

        if USE_NETCDF:

            from netCDF4 import Dataset

            # The scaling equation isn't "scale * data + offset",
            # so turn automatic scaling off.
            nc = Dataset(FILE_NAME)
            ncvar = nc.variables[DATAFIELD_NAME]
            ncvar.set_auto_maskandscale(False)
            data = ncvar[:].astype(np.float64)

            # Get any needed attributes.
            scale_factor = ncvar.scale_factor
            add_offset = ncvar.add_offset
            _FillValue = ncvar._FillValue
            valid_range = ncvar.valid_range
            units = ncvar.units
            long_name = ncvar.long_name

            gridmeta = getattr(nc, 'StructMetadata.0')

        else:

            from pyhdf.SD import SD, SDC

            hdf = SD(FILE_NAME, SDC.READ)

            # Read dataset.
            data2D = hdf.select(DATAFIELD_NAME)
            data = data2D[:, :].astype(np.double)

            # Read attributes.
            attrs = data2D.attributes(full=1)
            lna = attrs["long_name"]
            long_name = lna[0]
            vra = attrs["valid_range"]
            valid_range = vra[0]
            aoa = attrs["add_offset"]
            add_offset = aoa[0]
            fva = attrs["_FillValue"]
            _FillValue = fva[0]
            sfa = attrs["scale_factor"]
            scale_factor = sfa[0]
            ua = attrs["units"]
            units = ua[0]

            fattrs = hdf.attributes(full=1)
            ga = fattrs["StructMetadata.0"]
            gridmeta = ga[0]

        # Construct the grid. The needed information is in a global attribute
        # called 'StructMetadata.0'. Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                                  (?P<upper_left_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<upper_left_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = float(match.group('upper_left_x'))
        y0 = float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                                  (?P<lower_right_x>[+-]?\d+\.\d+)
                                  ,
                                  (?P<lower_right_y>[+-]?\d+\.\d+)
                                  \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = float(match.group('lower_right_x'))
        y1 = float(match.group('lower_right_y'))

        ny, nx = data.shape
        x = np.linspace(x0, x1, nx)
        y = np.linspace(y0, y1, ny)
        xv, yv = np.meshgrid(x, y)

    # In basemap, the sinusoidal projection is global, so we won't use it.
    # Instead we'll convert the grid back to lat/lons so we can use a local
    # projection.
    sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

    # Apply the attributes to the data.
    invalid = np.logical_or(data < valid_range[0], data > valid_range[1])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = (data - add_offset) / scale_factor
    data = np.ma.masked_array(data, np.isnan(data))

    # A plain geographic projection looks a little warped at this scale and
    # latitude, so use a Lambert Azimuthal Equal Area projection instead.
    m = Basemap(projection='laea', resolution='l', lat_ts=35,
                lat_0=35, lon_0=-92.5,
                width=2500000, height=2000000)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(30, 45, 5), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-105, -75, 5), labels=[0, 0, 0, 1])
    m.pcolormesh(lon[::2, ::2], lat[::2, ::2], data[::2, ::2], latlon=True)

    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def cluster_rounded(df, original_users, keyword):
    rounddigits = 0
    if '3' in keyword:
        rounddigits = 3
    elif '2' in keyword:
        rounddigits = 2
    else:
        printmsg("wrong radius from keyword, exiting")
        exit(-1)
    dfgrouped = df.groupby(['User ID'])
    mylist = []
    for name, group in dfgrouped:
        tmptags = []
        nodeids = []
        this_user_multipolygon = []
        this_user_multipolygon_3857 = []
        coords = []
        nodesnum = 0
        print(name)
        group = group.groupby(
            ['Latitude', 'Longitude']).size().reindex().sort_values().tail(maxpois)
        for i in group.items():  # .iteritems() was removed from pandas
            tmp_points = return_square_verticres((i[0][0], i[0][1]),
                                                 rounddigits)
            this_user_multipolygon.append(Polygon(tmp_points))
            xorig = []
            yorig = []
            for point in tmp_points:
                xorig.append(point[0])
                yorig.append(point[1])
            lonsorig, latsorig = pyproj.transform(wgs84, osm3857,
                                                  yorig, xorig)
            coords_3857 = list(zip(latsorig, lonsorig))
            this_user_multipolygon_3857.append(Polygon(coords_3857))
        this_user_multipolygon_wgs84 = MultiPolygon(this_user_multipolygon)
        this_user_multipolygon_wgs84 = this_user_multipolygon_wgs84.buffer(0)
        this_user_multipolygon_3857 = MultiPolygon(this_user_multipolygon_3857)
        this_user_multipolygon_3857 = this_user_multipolygon_3857.buffer(0)
        if "MultiPolygon" not in str(type(this_user_multipolygon_wgs84)):
            # Plot polygon.
            extorig = this_user_multipolygon_wgs84.exterior.xy
            lats = extorig[0]
            lons = extorig[1]
            # Query OSM.
            c = list(zip(lats, lons))
            textquery = string_format(c)
            result = querypoly(textquery)
            try:
                if result != 0:
                    for j in result.nodes:
                        tmptags.append(j.tags)
                    nodeids.append(result.node_ids)
                    nodesnum = len(result.nodes)
            except ValueError:
                pass
        else:
            for i in this_user_multipolygon_wgs84:
                extorig = i.exterior.xy
                lats = extorig[0]
                lons = extorig[1]
                c = list(zip(lats, lons))
                textquery = string_format(c)
                result = querypoly(textquery)
                print(textquery)
                print('\n')
                try:
                    if result != 0:
                        for j in result.nodes:
                            tmptags.append(j.tags)
                        nodeids.append(result.node_ids)
                        nodesnum += len(result.nodes)
                except ValueError:
                    pass
        atomic_operation(this_user_multipolygon_3857, name, tmptags,
                         nodesnum, nodeids)
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'Sea_Ice_by_Reflectance'

    if USE_GDAL:
        import gdal
        GRID_NAME = 'MOD_Grid_Seaice_1km'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(
            FILE_NAME, GRID_NAME, DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray()

        # Construct the grid.
        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        del gdset
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

        # Construct the grid.  The needed information is in a global attribute
        # called 'StructMetadata.0'.  Use regular expressions to tease out the
        # extents of the grid.
        ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                              (?P<upper_left_x>[+-]?\d+\.\d+)
                              ,
                              (?P<upper_left_y>[+-]?\d+\.\d+)
                              \)''', re.VERBOSE)
        match = ul_regex.search(gridmeta)
        x0 = np.float(match.group('upper_left_x'))
        y0 = np.float(match.group('upper_left_y'))

        lr_regex = re.compile(r'''LowerRightMtrs=\(
                              (?P<lower_right_x>[+-]?\d+\.\d+)
                              ,
                              (?P<lower_right_y>[+-]?\d+\.\d+)
                              \)''', re.VERBOSE)
        match = lr_regex.search(gridmeta)
        x1 = np.float(match.group('lower_right_x'))
        y1 = np.float(match.group('lower_right_y'))

        ny, nx = data.shape
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject the coordinates out of lamaz into lat/lon.
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)

    # Southern hemisphere lambert equal area projection.
    m = Basemap(projection='laea', resolution='l',
                lat_ts=-70, lat_0=-70, lon_0=-60,
                width=2500000, height=2500000)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, -50, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-100, -10, 20), labels=[0, 0, 0, 1])

    # Use a discretized colormap since we have only a few levels.
    # 0=missing data
    # 1=no decision
    # 11=night
    # 25=land
    # 37=inland water
    # 39=ocean
    # 50=cloud
    # 200=sea ice
    # 253=no input tile expected
    # 254=non-production mask
    # 255=fill
    lst = ['#727272', '#b7b7b7', '#ffff96', '#00ff00', '#232375', '#232375',
           '#63c6ff', '#ff0000', '#3f3f3f', '#000000', '#000000']
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 11, 25, 37, 39, 50, 200, 253, 254, 255, 256]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
    m.pcolormesh(lon, lat, data, latlon=True, cmap=cmap, norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks([0.5, 5.5, 18, 31, 38, 44.5, 125, 226.5, 253.5,
                         254.5, 255.5])
    color_bar.set_ticklabels(['missing', 'no decision', 'night', 'land',
                              'inland water', 'ocean', 'cloud', 'sea ice',
                              'no input tile\nexpected',
                              'non-production\nmask', 'fill'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def transformCoord(xy):
    # Accept an (x, y) pair; tuple unpacking in the signature is Python 2
    # only, so unpack explicitly to keep the function portable.
    x, y = xy
    WGS84 = pyproj.Proj(init='epsg:4326')
    Lambert2 = pyproj.Proj(init='epsg:27572')
    lons, lats = pyproj.transform(Lambert2, WGS84, x, y)
    return (lats, lons)
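# Note: pyproj.transform and "+init=" strings, used throughout these
# snippets, are deprecated in pyproj 2.x and removed in 3.x.  A minimal
# sketch of the same Lambert II -> WGS84 conversion with the newer
# Transformer API, assuming pyproj >= 2.1 is available; always_xy=True
# keeps the (x, y) argument order of the legacy call.  The name
# transformCoordModern is illustrative.
from pyproj import Transformer

lambert2_to_wgs84 = Transformer.from_crs("EPSG:27572", "EPSG:4326",
                                         always_xy=True)

def transformCoordModern(xy):
    lon, lat = lambert2_to_wgs84.transform(xy[0], xy[1])
    return (lat, lon)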
#) #more southerly selection, tested 9 Apr 2019

print 'Transforming coordinates of cauldron periphery'
wgs84 = pyproj.Proj("+init=EPSG:4326")  # LatLon with WGS84 datum used by ArcticDEM
hjorsey = pyproj.Proj("+init=EPSG:3056")  # UTM zone 28N for Iceland, so that coords will be in m northing/easting
#xt, yt = pyproj.transform(wgs84, hjorsey, cauldron_periphery[:,0], cauldron_periphery[:,1])
#cauldron_periph_utm = np.asarray([(xt[i], yt[i]) for i in range(len(xt))])
#transformed_mp = MultiPoint(cauldron_periph_utm)
#cauldron_center = transformed_mp.centroid #finding UTM coordinates of cauldron center
cauldron_center_latlon = (-17.5158322984487, 64.48773309819093)  #ginput selection of deepest point
cauldron_center = Point(pyproj.transform(wgs84, hjorsey,
                                         cauldron_center_latlon[0],
                                         cauldron_center_latlon[1]))
cauldron_radius = 1540  #m, based on previous calculation with centroid
nradii = 99
res = int(np.floor(nradii / 4))  #choose resolution for Shapely buffer based on how many sample points we want
#cauldron_radius = np.mean([cauldron_center.distance(p) for p in transformed_mp])
radial_buffer = cauldron_center.buffer(distance=cauldron_radius,
                                       resolution=res)  #set of points at distance R from the centroid
radial_pts = np.asarray(list(radial_buffer.exterior.coords))
rpx, rpy = pyproj.transform(hjorsey, wgs84, radial_pts[:, 0], radial_pts[:, 1])
radial_pts_latlon = np.asarray([(rpx[i], rpy[i]) for i in range(len(rpx))])
#center_latlon = pyproj.transform(hjorsey, wgs84, list(cauldron_center.coords)[0][0], list(cauldron_center.coords)[0][1])
center_latlon = cauldron_center_latlon
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'Extent' if USE_GDAL: import gdal GRID_NAME = 'Southern Hemisphere' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format( FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray() meta = gdset.GetMetadata() x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. Dataset name 'Extent' exists under different groups. # Use reference number to resolve ambiguity. data2D = hdf.select(hdf.reftoindex(12)) data = data2D[:, :].astype(np.float64) # Read global attribute. fattrs = hdf.attributes(full=1) ga = fattrs["StructMetadata.0"] gridmeta = ga[0] # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. ul_regex = re.compile( r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile( r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) ny, nx = data.shape xinc = (x1 - x0) / nx yinc = (y1 - y0) / ny x = np.linspace(x0, x0 + xinc * nx, nx) y = np.linspace(y0, y0 + yinc * ny, ny) xv, yv = np.meshgrid(x, y) # Reproject into WGS84 lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(lamaz, wgs84, xv, yv) # Use a south polar azimuthal equal area projection. m = Basemap(projection='splaea', resolution='l', boundinglat=-60, lon_0=0) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90, 0, 15), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 180, 30), labels=[0, 0, 0, 1]) # Bin the data as follows: # 0 -- snow-free land # 1-20% sea ice -- blue # 21-40% sea ice -- blue-cyan # 41-60% sea ice -- blue # 61-80% sea ice -- cyan-blue # 81-100% sea ice -- cyan # 101 -- permanent ice # 103 -- dry snow # 252 mixed pixels at coastlines # 255 ocean lst = [ '#004400', '#0000ff', '#0044ff', '#0088ff', '#00ccff', '#00ffff', '#ffffff', '#440044', '#191919', '#000000', '#8888cc' ] cmap = mpl.colors.ListedColormap(lst) bounds = [0, 1, 21, 41, 61, 81, 101, 103, 104, 252, 255] tickpts = [0.5, 11, 31, 51, 71, 91, 102, 103.5, 178, 253.5] norm = mpl.colors.BoundaryNorm(bounds, cmap.N) # The corners cause trouble, so chop them out. idx = slice(5, 721) m.pcolormesh(lon[idx, idx], lat[idx, idx], data[idx, idx], latlon=True, cmap=cmap, norm=norm) color_bar = plt.colorbar() color_bar.set_ticks(tickpts) color_bar.set_ticklabels([ 'snow-free\nland', '1-20% sea ice', '21-40% sea ice', '41-60% sea ice', '61-80% sea ice', '81-100% sea ice', 'permanent\nice', 'dry\nsnow', 'mixed pixels\nat coastlines', 'ocean' ]) color_bar.draw_all() basename = os.path.basename(FILE_NAME) long_name = DATAFIELD_NAME plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.1.py.png".format(basename) fig.savefig(pngfile)
def run(FILE_NAME): DATAFIELD_NAME = 'Gpp_1km' if USE_GDAL: import gdal GRID_NAME = 'MOD_Grid_MOD17A2' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) # Get any needed attributes. meta = gdset.GetMetadata() scale_factor = np.float(meta['scale_factor']) add_offset = np.float(meta['add_offset']) _FillValue = np.float(meta['_FillValue']) valid_range = [np.float(x) for x in meta['valid_range'].split(', ')] units = meta['units'] long_name = meta['long_name'] # Construct the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) x = np.linspace(x0, x0 + xinc*nx, nx) y = np.linspace(y0, y0 + yinc*ny, ny) xv, yv = np.meshgrid(x, y) del gdset else: if USE_NETCDF: from netCDF4 import Dataset nc = Dataset(FILE_NAME) ncvar = nc.variables[DATAFIELD_NAME] # The scaling equation isn't "scale * data + offset", # so turn automatic scaling off. ncvar.set_auto_maskandscale(False) data = ncvar[:].astype(np.float64) # Get any needed attributes. scale_factor = ncvar.scale_factor add_offset = ncvar.add_offset _FillValue = ncvar._FillValue valid_range = ncvar.valid_range units = ncvar.units long_name = ncvar.long_name gridmeta = getattr(nc, 'StructMetadata.0') else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:,:].astype(np.double) # Read attributes. attrs = data2D.attributes(full=1) lna=attrs["long_name"] long_name = lna[0] vra=attrs["valid_range"] valid_range = vra[0] aoa=attrs["add_offset"] add_offset = aoa[0] fva=attrs["_FillValue"] _FillValue = fva[0] sfa=attrs["scale_factor"] scale_factor = sfa[0] ua=attrs["units"] units = ua[0] fattrs = hdf.attributes(full=1) ga = fattrs["StructMetadata.0"] gridmeta = ga[0] # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. In addition, the grid is in packed decimal # degrees, so we need to normalize to degrees. ul_regex = re.compile(r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile(r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) ny, nx = data.shape x = np.linspace(x0, x1, nx) y = np.linspace(y0, y1, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons so we can use a local # projection. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat= pyproj.transform(sinu, wgs84, xv, yv) # Apply the attributes to the data. 
invalid = np.logical_or(data < valid_range[0], data > valid_range[1]) invalid = np.logical_or(invalid, data ==_FillValue) data[invalid] = np.nan data = (data - add_offset) * scale_factor data = np.ma.masked_array(data, np.isnan(data)) m = Basemap(projection='cyl', resolution='l', llcrnrlat=2.5, urcrnrlat=12.5, llcrnrlon=-87.5, urcrnrlon = -77.5) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(0, 15, 5), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-90, 75, 5), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
def run(FILE_NAME):

    DATAFIELD_NAME = 'Sea_Ice_by_Reflectance_SP'
    if USE_NETCDF4:
        from netCDF4 import Dataset
        nc = Dataset(FILE_NAME)
        ncvar = nc.variables[DATAFIELD_NAME]
        data = ncvar[:].astype(np.float64)
        gridmeta = getattr(nc, 'StructMetadata.0')
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # Read global attribute.
        fattrs = hdf.attributes(full=1)
        ga = fattrs["StructMetadata.0"]
        gridmeta = ga[0]

    # Construct the grid.  The needed information is in a global attribute
    # called 'StructMetadata.0'.  Use regular expressions to tease out the
    # extents of the grid.
    ul_regex = re.compile(r'''UpperLeftPointMtrs=\(
                          (?P<upper_left_x>[+-]?\d+\.\d+)
                          ,
                          (?P<upper_left_y>[+-]?\d+\.\d+)
                          \)''', re.VERBOSE)
    match = ul_regex.search(gridmeta)
    x0 = np.float(match.group('upper_left_x'))
    y0 = np.float(match.group('upper_left_y'))

    lr_regex = re.compile(r'''LowerRightMtrs=\(
                          (?P<lower_right_x>[+-]?\d+\.\d+)
                          ,
                          (?P<lower_right_y>[+-]?\d+\.\d+)
                          \)''', re.VERBOSE)
    match = lr_regex.search(gridmeta)
    x1 = np.float(match.group('lower_right_x'))
    y1 = np.float(match.group('lower_right_y'))

    ny, nx = data.shape
    x = np.linspace(x0, x1, nx)
    y = np.linspace(y0, y1, ny)
    xv, yv = np.meshgrid(x, y)

    # Reproject the coordinates out of lamaz into lat/lon.
    lamaz = pyproj.Proj("+proj=laea +a=6371228 +lat_0=-90 +lon_0=0 +units=m")
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(lamaz, wgs84, xv, yv)

    # Use a south polar azimuthal equal area projection.
    m = Basemap(projection='splaea', resolution='l',
                boundinglat=-20, lon_0=180)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 0, 15), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 180, 45), labels=[0, 0, 0, 1])

    # Use a discretized colormap since we have only a few levels.
    # 0=missing data
    # 1=no decision
    # 11=night
    # 25=land
    # 37=inland water
    # 39=ocean
    # 50=cloud
    # 200=sea ice
    # 253=no input tile expected
    # 254=non-production mask
    lst = ['#727272', '#b7b7b7', '#ffff96', '#00ff00', '#232375', '#232375',
           '#63c6ff', '#ff0000', '#3f3f3f', '#000000']
    cmap = mpl.colors.ListedColormap(lst)
    bounds = [0, 1, 11, 25, 37, 39, 50, 200, 253, 254, 255]
    norm = mpl.colors.BoundaryNorm(bounds, cmap.N)

    # Render only a subset of the mesh.
    rows = slice(500, 4000, 5)
    cols = slice(500, 4000, 5)
    m.pcolormesh(lon[rows, cols], lat[rows, cols], data[rows, cols],
                 latlon=True, cmap=cmap, norm=norm)

    color_bar = plt.colorbar()
    color_bar.set_ticks([0.5, 5.5, 18, 31, 38, 44.5, 125, 226.5, 253.5,
                         254.5])
    color_bar.set_ticklabels(['missing', 'no decision', 'night', 'land',
                              'inland water', 'ocean', 'cloud', 'sea ice',
                              'no input tile\nexpected',
                              'non-production\nmask'])
    color_bar.draw_all()

    basename = os.path.basename(FILE_NAME)
    long_name = DATAFIELD_NAME
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'SI_12km_SH_36H_DAY'

    if USE_GDAL:
        import gdal
        GRID_NAME = 'SpPolarGrid12km'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Read projection parameters from global attribute.
        meta = gdset.GetMetadata()
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)

        del gdset
    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.float64)

        # There are multiple grids in this file.
        # Thus, a simple regular expression search for
        # UpperLeft/LowerRightPoint from StructMetadata.0 won't work.
        #
        # Use HDFView and look for the following parameters:
        #
        # GROUP=GRID_2
        # GridName="SpPolarGrid06km"
        # XDim=1264
        # YDim=1328
        # UpperLeftPointMtrs=(-3950000.000000,4350000.000000)
        # LowerRightMtrs=(3950000.000000,-3950000.000000)
        ny, nx = data.shape
        x1 = 3950000
        x0 = -3950000
        y0 = 4350000
        y1 = -3950000
        xinc = (x1 - x0) / nx
        yinc = (y1 - y0) / ny

    # Apply the attributes information.
    # Ref: http://nsidc.org/data/docs/daac/ae_si12_12km_seaice/data.html
    data[data == 0] = np.nan
    data *= 0.1
    data = np.ma.masked_array(data, np.isnan(data))

    x = np.linspace(x0, x0 + xinc * nx, nx)
    y = np.linspace(y0, y0 + yinc * ny, ny)
    xv, yv = np.meshgrid(x, y)

    args = ["+proj=stere", "+lat_0=-90", "+lon_0=0", "+lat_ts=-70", "+k=1",
            "+es=0.006693883", "+a=6378273", "+x_0=0", "+y_0=0",
            "+ellps=WGS84", "+datum=WGS84"]
    pstereo = pyproj.Proj(' '.join(args))
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(pstereo, wgs84, xv, yv)
    units = 'K'
    long_name = DATAFIELD_NAME

    m = Basemap(projection='spstere', resolution='l', boundinglat=-45,
                lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-80, 0, 20), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 181, 30), labels=[0, 0, 0, 1])
    m.pcolormesh(lon, lat, data, latlon=True)
    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.1.py.png".format(basename)
    fig.savefig(pngfile)
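# When a file holds several grids, the single-match regex used elsewhere in
# these snippets returns whichever grid happens to appear first in
# StructMetadata.0, which is why the extents above are hardcoded.  A sketch
# that instead collects the corner coordinates of every grid by name,
# assuming gridmeta is the 'StructMetadata.0' string (read via
# hdf.attributes(full=1) as in the other snippets) and follows the usual
# HDF-EOS layout:
import re

grid_regex = re.compile(r'''GridName="(?P<name>\w+)".*?
                            UpperLeftPointMtrs=\((?P<ulx>[+-]?\d+\.\d+),
                                                 (?P<uly>[+-]?\d+\.\d+)\).*?
                            LowerRightMtrs=\((?P<lrx>[+-]?\d+\.\d+),
                                             (?P<lry>[+-]?\d+\.\d+)\)''',
                        re.VERBOSE | re.DOTALL)
extents = {m.group('name'): [float(m.group(g))
                             for g in ('ulx', 'uly', 'lrx', 'lry')]
           for m in grid_regex.finditer(gridmeta)}
# e.g. extents.get('SpPolarGrid12km') -> [x0, y0, x1, y1]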
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = "Nadir_Reflectance" if USE_GDAL: GRID_NAME = "MOD_Grid_BRDF" import gdal gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) data = data[0, :, :] # Construct the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) x = np.linspace(x0, x0 + xinc * nx, nx) y = np.linspace(y0, y0 + yinc * ny, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(sinu, wgs84, xv, yv) # Read the attributes. meta = gdset.GetMetadata() long_name = meta["long_name"] units = meta["units"] _FillValue = np.float(meta["_FillValue"]) scale_factor = np.float(meta["scale_factor"]) add_offset = np.float(meta["add_offset"]) valid_range = [np.float(x) for x in meta["valid_range"].split(", ")] del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data3D = hdf.select(DATAFIELD_NAME) data = data3D[:, :, 0].astype(np.double) # Read geolocation dataset from HDF-EOS2 dumper output. GEO_FILE_NAME = "lat_MOD43B4.A2006353.h15v15.004.2007006030047.output" GEO_FILE_NAME = os.path.join(os.environ["HDFEOS_ZOO_DIR"], GEO_FILE_NAME) lat = np.genfromtxt(GEO_FILE_NAME, delimiter=",", usecols=[0]) lat = lat.reshape(data.shape) GEO_FILE_NAME = "lon_MOD43B4.A2006353.h15v15.004.2007006030047.output" GEO_FILE_NAME = os.path.join(os.environ["HDFEOS_ZOO_DIR"], GEO_FILE_NAME) lon = np.genfromtxt(GEO_FILE_NAME, delimiter=",", usecols=[0]) lon = lon.reshape(data.shape) # Read attributes. attrs = data3D.attributes(full=1) lna = attrs["long_name"] long_name = lna[0] vra = attrs["valid_range"] valid_range = vra[0] fva = attrs["_FillValue"] _FillValue = fva[0] sfa = attrs["scale_factor"] scale_factor = sfa[0] aoa = attrs["add_offset"] add_offset = aoa[0] ua = attrs["units"] units = ua[0] invalid = np.logical_or(data > valid_range[1], data < valid_range[0]) invalid = np.logical_or(invalid, data == _FillValue) data[invalid] = np.nan data = scale_factor * (data - add_offset) data = np.ma.masked_array(data, np.isnan(data)) m = Basemap(projection="laea", resolution="l", lat_ts=-65, lat_0=-65, lon_0=-65, width=1250000, height=1250000) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-70, -50, 5), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-95, -35, 10), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title("{0}\n{1} at Num_Land_Bands=0".format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
def run(FILE_NAME): DATAFIELD_NAME = 'sur_refl_b01_1' if USE_NETCDF: from netCDF4 import Dataset # The scaling equation isn't what netcdf4 expects, so turn it off. nc = Dataset(FILE_NAME) ncvar = nc.variables[DATAFIELD_NAME] ncvar.set_auto_maskandscale(False) data = ncvar[:].astype(np.float64) # Get any needed attributes. scale_factor = ncvar.scale_factor add_offset = ncvar.add_offset _FillValue = ncvar._FillValue valid_range = ncvar.valid_range units = ncvar.units long_name = ncvar.long_name # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. gridmeta = getattr(nc, 'StructMetadata.0') ul_regex = re.compile( r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile( r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) nx, ny = data.shape x = np.linspace(x0, x1, nx) y = np.linspace(y0, y1, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(sinu, wgs84, xv, yv) elif USE_GDAL: # GDAL import gdal GRID_NAME = 'MODIS_Grid_2D' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format( FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) # Get any needed attributes. meta = gdset.GetMetadata() scale_factor = np.float(meta['scale_factor']) add_offset = np.float(meta['add_offset']) _FillValue = np.float(meta['_FillValue']) valid_range = [np.float(x) for x in meta['valid_range'].split(', ')] units = meta['units'] long_name = meta['long_name'] # Construct the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) x = np.linspace(x0, x0 + xinc * nx, nx) y = np.linspace(y0, y0 + yinc * ny, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(sinu, wgs84, xv, yv) del gdset else: # PyHDF from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:, :].astype(np.double) # Read geolocation dataset from HDF-EOS2 dumper output. GEO_FILE_NAME = 'lat_MYD09GQ.A2012246.h35v10.005.2012248075505.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lat = lat.reshape(data.shape) GEO_FILE_NAME = 'lon_MYD09GQ.A2012246.h35v10.005.2012248075505.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lon = lon.reshape(data.shape) # Read attributes. 
attrs = data2D.attributes(full=1) lna = attrs["long_name"] long_name = lna[0] vra = attrs["valid_range"] valid_range = vra[0] fva = attrs["_FillValue"] _FillValue = fva[0] sfa = attrs["scale_factor"] scale_factor = sfa[0] ua = attrs["units"] units = ua[0] aoa = attrs["add_offset"] add_offset = aoa[0] # Apply the attributes to the data. invalid = np.logical_or(data < valid_range[0], data > valid_range[1]) invalid = np.logical_or(invalid, data == _FillValue) data[invalid] = np.nan data = (data - add_offset) / scale_factor data = np.ma.masked_array(data, np.isnan(data)) # There is a wrap-around issue to deal with, as some of the grid extends # eastward over the international dateline. Adjust the longitude to avoid # a smearing effect. lon[lon < 0] += 360 m = Basemap(projection='cyl', resolution='l', llcrnrlat=-22.5, urcrnrlat=-7.5, llcrnrlon=167.5, urcrnrlon=192.5) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-20, -5, 5), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(170, 200, 10), labels=[0, 0, 0, 1]) # Data too big for plotting? Nothing will show. # m.pcolormesh(lon, lat, data, latlon=True) m.pcolormesh(lon[::2, ::2], lat[::2, ::2], data[::2, ::2], latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
for fn in os.listdir(PATH): if fn.startswith("new_"): c += 1 with open(PATH + fn, "rb") as f: # 3 lists for each file t = ([], [], []) for raw_line in f: l = raw_line.split() # clean the data types #lon, lat, time line = [float(l[0]), float(l[1]), int(l[3])] if line[2] < t_min: t_min = line[2] if line[2] > t_max: t_max = line[2] x, y = pyproj.transform(wgs84, epsg3493, line[1], line[0]) t[0].append(x) t[1].append(y) t[2].append(line[2]) # saving the trace of this particular cab in the dic d[fn[4:]] = t # empty list of lists, each list containing a "line" (v_id x y) td = [] times = list(range(t_min, t_max)) times0 = list(range(0, t_max-t_min,10)) with open(PATH + "processed", "wb") as f:
wgs84 = pyproj.Proj("+init=EPSG:4326") i = 0 lat = [] lon = [] #1 foot = 0.3048 meters conv = 0.3048 with open("trip_data_1_next_trip_start_location.txt") as f: next(f) for line in f: i += 1 # print line strings = line.split(",") co1 = float(strings[0]) co2 = float(strings[1]) x2, y2 = pyproj.transform(wgs84, isn2004, co1, co2) lat.append(x2) lon.append(y2) # if i == 14450: # break if i == 1169120: break x1 = lat y1 = lon plt.plot(x1, y1, 'o', color='blue', markersize=7, markeredgewidth=0.0) plt.show()
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'SI_25km_NH_06V_ASC' if USE_GDAL: import gdal GRID_NAME = 'NpPolarGrid25km' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format( FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) # Read projection parameters from global attribute. meta = gdset.GetMetadata() x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:, :].astype(np.float64) # Read global attribute. fattrs = hdf.attributes(full=1) ga = fattrs["StructMetadata.0"] gridmeta = ga[0] # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. ul_regex = re.compile( r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile( r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) ny, nx = data.shape xinc = (x1 - x0) / nx yinc = (y1 - y0) / ny # Apply the attributes information. # Ref: http://nsidc.org/data/docs/daac/ae_si12_25km_seaice/data.html data[data == 0] = np.nan data *= 0.1 data = np.ma.masked_array(data, np.isnan(data)) # Construct the grid. Reproject out of the GCTP stereographic into lat/lon. x = np.linspace(x0, x0 + xinc * nx, nx) y = np.linspace(y0, y0 + yinc * ny, ny) xv, yv = np.meshgrid(x, y) args = [ "+proj=stere", "+lat_0=90", "+lon_0=-45", "+lat_ts=70", "+k=1", "+es=0.006693883", "+a=6378273", "+x_0=0", "+y_0=0", "+ellps=WGS84", "+datum=WGS84" ] pstereo = pyproj.Proj(' '.join(args)) wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(pstereo, wgs84, xv, yv) units = 'K' long_name = DATAFIELD_NAME m = Basemap(projection='npstere', resolution='l', boundinglat=30, lon_0=0) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(0, 91, 20), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 181, 45), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'SI_12km_NH_ICECON_DAY' from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:,:].astype(np.float64) # Read global attribute. fattrs = hdf.attributes(full=1) ga = fattrs["StructMetadata.0"] gridmeta = ga[0] # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. ul_regex = re.compile(r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile(r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) ny, nx = data.shape xinc = (x1 - x0) / nx yinc = (y1 - y0) / ny # Handle land mask. # http://nsidc.org/data/docs/daac/ae_si12_12km_seaice/data.html data[data > 100.0] = np.nan data = np.ma.masked_array(data, np.isnan(data)) # Construct the grid. # Reproject out of the GCTP stereographic into lat/lon. x = np.linspace(x0, x0 + xinc*nx, nx) y = np.linspace(y0, y0 + yinc*ny, ny) xv, yv = np.meshgrid(x, y) args = ["+proj=stere", "+lat_0=90", "+lon_0=-45", "+lat_ts=70", "+k=1", "+es=0.006693883", "+a=6378273", "+x_0=0", "+y_0=0", "+ellps=WGS84", "+datum=WGS84"] pstereo = pyproj.Proj(' '.join(args)) wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat= pyproj.transform(pstereo, wgs84, xv, yv) units = 'Percent' long_name = DATAFIELD_NAME m = Basemap(projection='npstere', resolution='l', boundinglat=30, lon_0 = 0) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(0, 91, 20), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 181, 45), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.n.py.png".format(basename) fig.savefig(pngfile)
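# The hand-assembled "+proj=stere" string above mixes Hughes 1980 radii
# (+a, +es) with "+datum=WGS84"; the grids it approximates are registered
# as EPSG:3411 (NSIDC Sea Ice Polar Stereographic North) and EPSG:3412
# (South).  A sketch of requesting the CRS from the EPSG registry instead,
# assuming the local PROJ installation ships it; these lines would replace
# the three projection lines inside run() above:
pstereo = pyproj.Proj("+init=EPSG:3411")   # north; use EPSG:3412 for south
wgs84 = pyproj.Proj("+init=EPSG:4326")
lon, lat = pyproj.transform(pstereo, wgs84, xv, yv)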
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'Albedo_BSA_Band1' if USE_GDAL: import gdal GRID_NAME = 'MOD_Grid_BRDF' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) # Construct the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) x = np.linspace(x0, x0 + xinc*nx, nx) y = np.linspace(y0, y0 + yinc*ny, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat= pyproj.transform(sinu, wgs84, xv, yv) # Read the attributes. meta = gdset.GetMetadata() long_name = meta['long_name'] units = meta['units'] _FillValue = np.float(meta['_FillValue']) scale_factor = np.float(meta['scale_factor']) add_offset = np.float(meta['add_offset']) valid_range = [np.float(x) for x in meta['valid_range'].split(', ')] del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:,:].astype(np.double) # Read geolocation dataset from HDF-EOS2 dumper output. GEO_FILE_NAME = 'lat_MCD43A3.A2013305.h12v11.005.2013322102420.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lat = lat.reshape(data.shape) GEO_FILE_NAME = 'lon_MCD43A3.A2013305.h12v11.005.2013322102420.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lon = lon.reshape(data.shape) # Read attributes. attrs = data2D.attributes(full=1) lna=attrs["long_name"] long_name = lna[0] vra=attrs["valid_range"] valid_range = vra[0] aoa=attrs["add_offset"] add_offset = aoa[0] fva=attrs["_FillValue"] _FillValue = fva[0] sfa=attrs["scale_factor"] scale_factor = sfa[0] ua=attrs["units"] units = ua[0] invalid = data == _FillValue invalid = np.logical_or(invalid, data < valid_range[0]) invalid = np.logical_or(invalid, data > valid_range[1]) data[invalid] = np.nan data = scale_factor * (data - add_offset) data = np.ma.masked_array(data, np.isnan(data)) m = Basemap(projection='cyl', resolution='l', lon_0=-10, llcrnrlat=-32.5, urcrnrlat = -17.5, llcrnrlon=-72.5, urcrnrlon = -52.5) m.drawcoastlines(linewidth=1.0) m.drawparallels(np.arange(-30, -10, 5), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-70, -50, 5), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data) # Subset if you want to speed up processing. # m.pcolormesh(lon[::2], lat[::2], data[::2]) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}\n'.format(basename, long_name), fontsize=11) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
if args.proj!='': print "Projecting the nodes coordinates" import mpl_toolkits.basemap.pyproj as pyproj lla = pyproj.Proj(proj='latlong', ellps='WGS84', datum='WGS84') if args.proj[0]!='geocent': sProj = "+init=%s" %args.proj[0] myproj=pyproj.Proj(sProj) else: myproj = pyproj.Proj(proj='geocent', ellps='WGS84', datum='WGS84') else: print "no projection carried out" fout = open(args.output_file,'w') ### WRITE THE GOCAD TS FILE fout.write("GOCAD TSURF 1\nHEADER {\nname:"+args.objectname+"\n}\nTRIANGLES\n") for j in range(0,NY): for i in range(0,NX): if args.proj!='': xyz = pyproj.transform(lla, myproj, dataxyz[i,j,0],dataxyz[i,j,1], dataxyz[i,j,2], radians=False) fout.write('VRTX '+str(i+j*NX+1)+' %.10e %.10e %.10e\n' %tuple(xyz)) else: fout.write("VRTX %d %f %f %f\n" %(i+j*NX+1, dataxyz[i,j,0], dataxyz[i,j,1], dataxyz[i,j,2])) for tr in triangles: fout.write("TRGL %d %d %d\n" %(tr[0],tr[1],tr[2])) fout.write("END") fout.close()
lla = pyproj.Proj(proj="latlong", ellps="WGS84", datum="WGS84") if args.proj[0] != "geocent": sProj = args.proj[0] myproj = pyproj.Proj(sProj) else: myproj = pyproj.Proj(proj="geocent", ellps="WGS84", datum="WGS84") if args.hole != "": print("a hole will be isolated in the surface (stl only)") x0hole = float(args.hole[0]) x1hole = float(args.hole[1]) y0hole = float(args.hole[2]) y1hole = float(args.hole[3]) print("hole coordinates %f %f %f %f" % (x0hole, x1hole, y0hole, y1hole)) if args.proj != "": xy = pyproj.transform(lla, myproj, x0hole, y0hole, 0, radians=False) x0hole = xy[0] y0hole = xy[1] xy = pyproj.transform(lla, myproj, x1hole, y1hole, 0, radians=False) x1hole = xy[0] y1hole = xy[1] print("hole coordinates (projected) %f %f %f %f" % (x0hole, x1hole, y0hole, y1hole)) fh = open(args.input_file) NX = int(fh.readline().split()[1]) NY = int(fh.readline().split()[1]) xbotleft = float(fh.readline().split()[1]) ybotleft = float(fh.readline().split()[1]) dx = float(fh.readline().split()[1]) fh.readline()
N = 65.5 #75.5#79.5 #72.5 #
S = 62.0 #63.0#76.0 #66.85 #
E = -40.0 #-28.0#-52.0 #-26. #
W = -48.0 #-56.2#-65.70 #-39. #

#outletLat = 72.827
#outletLon = -54.309
outletLat = 62.863 #68.545 #72.846
outletLon = -42.608 #-32.718 #-54.016

glacierName = "All" #"Heimdal"#"NW"#"ThuleArea" #"KangerD"

# convert the bounding box and outlet point to polar stereographic
E_PS, N_PS = pyproj.transform(WGS84, PS_north, E, N)
W_PS, S_PS = pyproj.transform(WGS84, PS_north, W, S)
outletLon_PS, outletLat_PS = pyproj.transform(WGS84, PS_north, outletLon, outletLat)

# THIS INCLUDES ICE FREE AREAS
# TIFFS
surface_tiff_ALL = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\surface_Layer.tif'
bed_tiff_ALL = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\bed_Layer.tif'
errbed_tiff_ALL = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\errbed_Layer.tif'
mask_tiff_ALL = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\mask_Layer.tif'

## TIFFS - the one was added because needed to write new tiff with no compression
#surface_tiff = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\temp_surface_Layer.tif'
#bed_tiff = 'Y:\\Documents\\DATA\\MORLIGHEM_NSIDC\\temp_bed_Layer.tif'
def run(FILE_NAME): DATAFIELD_NAME = 'NDVI_TOA' if USE_GDAL: # Gdal import gdal GRID_NAME = 'WELD_GRID' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME, GRID_NAME, DATAFIELD_NAME) # Scale down the data by a factor of 5 so that low-memory machines # can handle it. gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64)[::5, ::5] # Get any needed attributes. meta = gdset.GetMetadata() scale = np.float(meta['scale_factor']) fillvalue = np.float(meta['_FillValue']) valid_range = [np.float(x) for x in meta['valid_range'].split(', ')] units = meta['units'] # Construct the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() ny, nx = (gdset.RasterYSize / 5, gdset.RasterXSize / 5) x = np.linspace(x0, x0 + xinc*5*nx, nx) y = np.linspace(y0, y0 + yinc*5*ny, ny) xv, yv = np.meshgrid(x, y) del gdset else: if USE_NETCDF: from netCDF4 import Dataset # Scale down the data by a factor of 5 so that low-memory machines # can handle it. nc = Dataset(FILE_NAME) ncvar = nc.variables[DATAFIELD_NAME] ncvar.set_auto_maskandscale(False) data = ncvar[::5, ::5].astype(np.float64) # Get any needed attributes. scale = ncvar.scale_factor fillvalue = ncvar._FillValue valid_range = ncvar.valid_range units = ncvar.units gridmeta = getattr(nc, 'StructMetadata.0') else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:,:].astype(np.double) # Scale down the data by a factor of 6 so that low-memory machines # can handle it. data = data[::6, ::6] # Read attributes. attrs = data2D.attributes(full=1) vra=attrs["valid_range"] valid_range = vra[0] fva=attrs["_FillValue"] fillvalue = fva[0] sfa=attrs["scale_factor"] scale = sfa[0] ua=attrs["units"] units = ua[0] fattrs = hdf.attributes(full=1) ga = fattrs["StructMetadata.0"] gridmeta = ga[0] # Construct the grid. The needed information is in a global attribute # called 'StructMetadata.0'. Use regular expressions to tease out the # extents of the grid. ul_regex = re.compile(r'''UpperLeftPointMtrs=\( (?P<upper_left_x>[+-]?\d+\.\d+) , (?P<upper_left_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = ul_regex.search(gridmeta) x0 = np.float(match.group('upper_left_x')) y0 = np.float(match.group('upper_left_y')) lr_regex = re.compile(r'''LowerRightMtrs=\( (?P<lower_right_x>[+-]?\d+\.\d+) , (?P<lower_right_y>[+-]?\d+\.\d+) \)''', re.VERBOSE) match = lr_regex.search(gridmeta) x1 = np.float(match.group('lower_right_x')) y1 = np.float(match.group('lower_right_y')) ny, nx = data.shape x = np.linspace(x0, x1, nx) y = np.linspace(y0, y1, ny) xv, yv = np.meshgrid(x, y) # Apply the attributes to the data. invalid = np.logical_or(data < valid_range[0], data > valid_range[1]) invalid = np.logical_or(invalid, data == fillvalue) data[invalid] = np.nan data = data * scale data = np.ma.masked_array(data, np.isnan(data)) # Convert the grid back to lat/lon. The 1st and 2nd standard parallels, # the center meridian, and the latitude of projected origin are in the # projection parameters contained in the "StructMetadata.0" global # attribute. The following regular expression could have been used to # retrieve them. # # Ref: HDF-EOS Library User's Guide for the EOSDIS Evolution and # Development (EED) Contract, Volume 2, Revision 02: Function # Reference Guide, pages 1-6 through 1-13. 
#
# aea_regex = re.compile(r'''Projection=GCTP_ALBERS\s+ProjParams=
#                        \(\d+,\d+
#                        (?P<stdpr1>[-]?\d+),
#                        (?P<stdpr2>[-]?\d+),
#                        (?P<centmer>[-]?\d+),
#                        (?P<origlat>[-]?\d+)
#                        ,0{7}\)''', re.VERBOSE)
#
aea = pyproj.Proj("+proj=aea +lat_1=29.5 +lat_2=45.5 +lon_0=-96 +lat_0=23")
wgs84 = pyproj.Proj("+init=EPSG:4326")
lon, lat = pyproj.transform(aea, wgs84, xv, yv)

m = Basemap(projection='aea', resolution='i',
            lat_1=29.5, lat_2=45.5, lon_0=-96, lat_0=23,
            llcrnrlat=37.5, urcrnrlat=42.5,
            llcrnrlon=-127.5, urcrnrlon=-122.5)
m.drawcoastlines(linewidth=0.5)
m.drawparallels(np.arange(35, 45, 1), labels=[1, 0, 0, 0])
m.drawmeridians(np.arange(-130, -120, 1), labels=[0, 0, 0, 1])
m.pcolormesh(lon, lat, data, latlon=True)
cb = m.colorbar()
cb.set_label(units)

long_name = DATAFIELD_NAME
basename = os.path.basename(FILE_NAME)
plt.title('{0}\n{1}'.format(basename, long_name))
fig = plt.gcf()
# plt.show()
pngfile = "{0}.py.png".format(basename)
fig.savefig(pngfile)
import mpl_toolkits.basemap.pyproj as pyproj lla = pyproj.Proj(proj='latlong', ellps='WGS84', datum='WGS84') if args.proj[0] != 'geocent': sProj = args.proj[0] myproj = pyproj.Proj(sProj) else: myproj = pyproj.Proj(proj='geocent', ellps='WGS84', datum='WGS84') #read Ts file fid = open(args.ts_file) lines = fid.readlines() fid.close() foutname = args.ts_file[0:-3] + '_proj.ts' fout = open(foutname, 'w') for line in lines: if line.startswith('VRTX'): val = [float(val) for val in line.split()[1:5]] xyz = pyproj.transform(lla, myproj, val[1], val[2], val[3] * 1e3, radians=False) fout.write('VRTX ' + str(int(val[0])) + ' %.10e %.10e %.10e\n' % tuple(xyz)) else: fout.write(line) fout.close()
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'A_TB36.5H (Res 1)' if USE_GDAL: import gdal import mpl_toolkits.basemap.pyproj as pyproj GRID_NAME = 'Ascending_Land_Grid' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format( FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) meta = gdset.GetMetadata() _FillValue = float(meta['_FillValue']) # Construct the grid. # Reproject out of the global GCTP CEA into lat/lon. # Ref: http://nsidc.org/data/atlas/epsg_3410.html meta = gdset.GetMetadata() x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) x = np.linspace(x0, x0 + xinc * nx, nx) y = np.linspace(y0, y0 + yinc * ny, ny) xv, yv = np.meshgrid(x, y) args = [ "+proj=cea", "+lat_0=0", "+lon_0=0", "+lat_ts=30", "+a=6371228", "+units=m" ] pstereo = pyproj.Proj(' '.join(args)) wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat = pyproj.transform(pstereo, wgs84, xv, yv) del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:, :].astype(np.float64) # Read geolocation dataset from HDF-EOS2 dumper output. GEO_FILE_NAME = 'lat_AMSR_E_L3_DailyLand_V06_20050118_Ascending_Land_Grid.output' try: GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) except KeyError: pass lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) GEO_FILE_NAME = 'lon_AMSR_E_L3_DailyLand_V06_20050118_Ascending_Land_Grid.output' try: GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) except KeyError: pass lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) # Read attributes. attrs = data2D.attributes(full=1) fva = attrs["_FillValue"] _FillValue = fva[0] # Apply the attributes information. # Ref: http://nsidc.org/data/docs/daac/ae_land3_l3_soil_moisture/data.html data[data == _FillValue] = np.nan data *= 0.1 data = np.ma.masked_array(data, np.isnan(data)) long_name = DATAFIELD_NAME units = 'Kelvin' m = Basemap(projection='cyl', resolution='l', llcrnrlat=-90, llcrnrlon=-180, urcrnrlat=90, urcrnrlon=180) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(-90, 91, 30), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(-180, 181, 45), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb = m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
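# The Ref URL above points at EPSG:3410, the NSIDC EASE-Grid Global
# cylindrical equal-area CRS, so the hand-assembled "+proj=cea" string can
# equivalently be requested from the EPSG registry.  A sketch, assuming the
# local PROJ installation ships that registry; these lines would replace
# the three projection lines inside run() above:
pstereo = pyproj.Proj("+init=EPSG:3410")
wgs84 = pyproj.Proj("+init=EPSG:4326")
lon, lat = pyproj.transform(pstereo, wgs84, xv, yv)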
def on_go(datafile,calbfile,dirname, calb_interp,linear_interp, despike, x_var, y_var, intsp_x, intsp_y, arcgrdval, data_match, buff, shape, gisgrd, surf, geopng, topo, blank, coord):

    #Variables related to direction of data collection
    negative_gradient = True
    x_var = 1 #average measurement spacing 0.5
    y_var = 4 #average traverse spacing 0.25

    print 'files opened'
    set_status_var('files opened')

    #define all the file paths
    filename = os.path.basename(datafile)
    #dirname = os.path.dirname(datafile)
    output_name = dirname + '/' + filename[0:-4] + '_calibrated.csv'

    if topo:
        if linear_interp.isChecked():
            topo_path = dirname + '/' + 'Linear' + '/' + 'topo'
        else:
            topo_path = dirname + '/' + 'Cubic' + '/' + 'topo'
        if not os.path.exists(topo_path):
            os.makedirs(topo_path, mode = 511)

    if geopng:
        if linear_interp.isChecked():
            raw_img_path = dirname + '/' + 'Linear' + '/' + 'raw_images'
        else:
            raw_img_path = dirname + '/' + 'Cubic' + '/' + 'raw_images'
        if not os.path.exists(raw_img_path):
            os.makedirs(raw_img_path, mode = 511)
        if linear_interp.isChecked():
            corr_img_path = dirname + '/' + 'Linear' + '/' + 'corr_images'
        else:
            corr_img_path = dirname + '/' + 'Cubic' + '/' + 'corr_images'
        if not os.path.exists(corr_img_path):
            os.makedirs(corr_img_path, mode = 511)

    if gisgrd:
        if linear_interp.isChecked():
            raw_gis_path = dirname + '/' + 'Linear' + '/' + 'raw_gis_grids'
        else:
            raw_gis_path = dirname + '/' + 'Cubic' + '/' + 'raw_gis_grids'
        if not os.path.exists(raw_gis_path):
            os.makedirs(raw_gis_path, mode = 511)
        if linear_interp.isChecked():
            corr_gis_path = dirname + '/' + 'Linear' + '/' + 'corr_gis_grids'
        else:
            corr_gis_path = dirname + '/' + 'Cubic' + '/' + 'corr_gis_grids'
        if not os.path.exists(corr_gis_path):
            os.makedirs(corr_gis_path, mode = 511)

    #loads data from datafile into data array
    data = np.loadtxt(datafile, skiprows=1, usecols =(0,1,2,3,4,5,6,7,8))

    #Convert data to OS for use in GIS
    osgb36 = pyproj.Proj("+init=EPSG:27700")
    UTM30N = pyproj.Proj("+init=EPSG:32630")
    n_data = data[:,0]
    e_data = data[:,1]
    data_coord = np.array(pyproj.transform(UTM30N, osgb36, e_data, n_data)).T
    data = np.column_stack((data_coord, data[:,2:9]))
    output_orig = dirname + '/' + filename[0:-4] + '_OS_transform.csv'
    outputOS = open(output_orig, 'w')
    np.savetxt(outputOS, data, fmt = '%8.3f', delimiter = ',',
               header = 'Eastings, Northings,Altitude,C1,I1,C2,I2,C3,I3',
               comments = '')

    #loads data from calibration file into array
    calb = np.loadtxt(calbfile, skiprows=1, usecols =(0,1,2,3,4,5,6,7,8))

    #Convert calibration to OS for use in GIS
    n_calb = calb[:,0]
    e_calb = calb[:,1]
    calb_coord = np.array(pyproj.transform(UTM30N, osgb36, e_calb, n_calb)).T
    calb = np.column_stack((calb_coord, calb[:,2:9]))

    print 'loaded into arrays'
    set_status_var('loaded into arrays')

    #initiates spike removal for calibration and data arrays
    if despike:
        data = spike_removal(data[:,0:3],data[:,3:9], 2, 2)
        calb = spike_removal(calb[:,0:3],calb[:,3:9], 3, 2)

    #produces arrays containing only coordinates
    data_coord = np.array(data[:,0:3])
    calb_coord = np.array(calb[:,0:3])

    #Corrects data against drift calibration file.  The interpolation method
    #follows the Linear/Cubic choice used for the output directories above.
    interp_method = 'linear' if linear_interp.isChecked() else 'cubic'
    out_array = drift_calb(data_coord,data,calb_coord,calb,interp_method)
    output = open(output_name, 'w')
    header = 'Eastings, Northings,Altitude,C1,I1,C2,I2,C3,I3'
    print>>output, header
    mean = np.mean(out_array,axis=0)
    print mean
    out_array[:,3:9] = np.subtract(out_array[:,3:9],mean[3:9])
    np.savetxt(output, out_array, fmt = '%8.3f', delimiter=',')

    #perform convex hull peeling
    out_array = hull_peeling(data,85.0)

    #Begin Periodic Filtering
    '''
    I have taken this Periodic Filter routine out because it was trying to
    call setCentralWidget on a QDialog object, where it only works with a
    QMainWindow.  I suspect it was no longer meant to be in there at all:
    the program runs if I remove it and fails if I don't!
    '''
    #periodic_filter(out_array[:,3:9])
    '''
    data_fft = np.fft.rfft(out_array[:,3], axis=0)
    data_fft = abs(data_fft)
    data_fft = np.log10(data_fft)
    plt.plot(data_fft)
    plt.axis([0, len(data_fft),np.min(data_fft),np.max(data_fft)])
    plt.grid(True)
    plt.show()
    a,b = fedit([('start','0'),('stop','100')], title="Periodic Filter Range")
    a,b = int(a),int(b)
    np.savetxt('data_fft.csv', data_fft, delimiter=',')
    for i in range(3,9):
        temp = np.fft.rfft(out_array[:,i], axis=0)
        temp[a:b] = 0
        temp[-a:-b] = 0
        out_array[:,i] = np.fft.irfft(temp, n=len(out_array[:,i]), axis=0)
    np.savetxt('filtered.csv', data, delimiter=',')
    '''

    #begin calculation of initial vector
    x_gradients = np.subtract(out_array[1:-1,0],out_array[0:-2,0])
    y_gradients = np.subtract(out_array[1:-1,1],out_array[0:-2,1])
    print len(x_gradients), len(y_gradients)
    old_err_state = np.seterr(divide='ignore')
    gradients = np.divide(y_gradients,x_gradients)
    gradients = np.abs(gradients)
    med_gradient = np.median(gradients)
    if negative_gradient:
        med_gradient = 1/ med_gradient
    print 'gradient', med_gradient

    #transform xy values to origin
    #xy_min = np.min(out_array[:,0:2],axis=0)
    #new_xy = np.subtract(out_array[:,0:2],xy_min)
    new_xy = out_array[:,0:2]
    y_corect = np.multiply((1/med_gradient),new_xy[:,1])
    x_corect = np.multiply(new_xy[:,0],-1/med_gradient)
    xy_corect = np.column_stack((y_corect,x_corect))
    new_xy = np.subtract(xy_corect,new_xy)
    new_xy = np.subtract(0,new_xy)
    xy_min = np.min(new_xy,axis=0)
    new_xy = np.subtract(new_xy, xy_min)
    np.savetxt('new_xy.csv',new_xy,delimiter=',')
    np.savetxt('xy_correct.csv',xy_corect,delimiter=',')
    out_array[:,0:2] = new_xy

    #interpolates data to a rectangular grid
    if negative_gradient:
        x_var, y_var = y_var, x_var
    x_space = int((np.max(out_array[:,0])- np.min(out_array[:,0]))/x_var)
    y_space = int((np.max(out_array[:,1])- np.min(out_array[:,1]))/y_var)
    print x_space, y_space
    xi = np.linspace(int(np.min(out_array[:,0])),int(np.max(out_array[:,0])),x_space)
    yi = np.linspace(int(np.min(out_array[:,1])),int(np.max(out_array[:,1])),y_space)
    xyi = cartesian(([xi],[yi]))
    interp_array = xyi

    #interpolates using Inverse Distance Weighting Algorithm
    for i in range(3,9):
        invdisttree = Invdisttree(out_array[:,0:2],out_array[:,i])
        vars()['d'+str(i)] = invdisttree( xyi, nnear=8, eps=0, p=1, weights=None)
        np.savetxt('d'+str(i)+'.csv', vars()['d'+str(i)], delimiter=',')
        interp_array = np.column_stack((interp_array,vars()['d'+str(i)]))
        #print vars()['d'+str(i)]
        print i
    np.savetxt('interp.csv', interp_array, fmt = '%8.3f', delimiter=',')

    #reinterpolates using piecewise cubic method
    '''
    x_space = int((np.max(out_array[:,0])- np.min(out_array[:,0]))/0.125)
    y_space = int((np.max(out_array[:,1])- np.min(out_array[:,1]))/0.125)
    xi = np.linspace(int(np.min(out_array[:,0])),int(np.max(out_array[:,0])),x_space)
    yi = np.linspace(int(np.min(out_array[:,1])),int(np.max(out_array[:,1])),y_space)
    xyi = cartesian(([xi],[yi]))
    interp_array2 = xyi
    for i in range(3,9):
        vars()['e'+str(i)] = interpolate.griddata(interp_array[:,0:2], vars()['d'+str(i)], (xi, yi), method='cubic')
        np.savetxt('e'+str(i)+'.csv', vars()['e'+str(i)], delimiter=',')
    for i in range(3,9):
        interpolater = interpolate.CloughTocher2DInterpolator(interp_array[:,0:2],vars()['d'+str(i)])
        vars()['e'+str(i)] = interpolater(xyi)
        np.savetxt('e'+str(i)+'.csv', vars()['e'+str(i)], delimiter=',')
        interp_array2 = np.column_stack((interp_array2,vars()['e'+str(i)]))
    np.savetxt('interp2.csv', interp_array2, delimiter=',')
    '''
    output.close()
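# The commented-out periodic filter above suppresses periodic striping by
# zeroing a band of rFFT coefficients.  A standalone sketch of that idea,
# decoupled from the GUI; the bin range (a, b) is assumed to be chosen by
# inspecting the log-amplitude spectrum, as in the dead code above.  (The
# temp[-a:-b] line is dropped: an rFFT has no negative-frequency bins.)
def periodic_filter(values, a, b):
    spectrum = np.fft.rfft(values, axis=0)
    spectrum[a:b] = 0
    return np.fft.irfft(spectrum, n=len(values), axis=0)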
import os

import folium
import shapefile
import mpl_toolkits.basemap.pyproj as pyproj

shape_files = r"C:\Users\andrewd\Desktop\gb_shapes\Data"
shp = shapefile.Reader(os.path.join(shape_files, 'county_region.shp'))

wgs84 = pyproj.Proj("+init=EPSG:4326")
osgb36 = pyproj.Proj("+init=EPSG:27700")

features = []
for shape in shp.iterShapes():
    shape_data = shape.__geo_interface__
    for in_shape in shape_data['coordinates']:
        try:
            coords = [[pyproj.transform(osgb36, wgs84, *pt) for pt in in_shape]]
            break
        except Exception:
            continue
    features.append({"type": "Feature",
                     "properties": {"name": "A random polygon"},
                     "geometry": {"type": "Polygon",
                                  "coordinates": coords}})
    #break

data = {"type": "FeatureCollection",
        "features": features}
#print data

input_file = 'test.json'
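# A sketch of how the FeatureCollection built above could be written out and
# rendered with folium; the output path and map centre are illustrative
# placeholders.
import json

with open(input_file, 'w') as f:
    json.dump(data, f)

fmap = folium.Map(location=[54.0, -2.0], zoom_start=6)
folium.GeoJson(input_file, name='counties').add_to(fmap)
fmap.save('counties_map.html')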
def run(FILE_NAME): # Identify the data field. DATAFIELD_NAME = 'Albedo_BSA_Band1' if USE_GDAL: import gdal # Read dataset. GRID_NAME = 'NPP_Grid_BRDF' gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME, GRID_NAME, DATAFIELD_NAME) gdset = gdal.Open(gname) data = gdset.ReadAsArray().astype(np.float64) # Read parameters for constructing the grid. x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform() nx, ny = (gdset.RasterXSize, gdset.RasterYSize) # Construct the grid. x = np.linspace(x0, x0 + xinc*nx, nx) y = np.linspace(y0, y0 + yinc*ny, ny) xv, yv = np.meshgrid(x, y) # In basemap, the sinusoidal projection is global, so we won't use it. # Instead we'll convert the grid back to lat/lons. sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext") wgs84 = pyproj.Proj("+init=EPSG:4326") lon, lat= pyproj.transform(sinu, wgs84, xv, yv) # Read fill value, valid range, scale factor, add_offset attributes. meta = gdset.GetMetadata() # Apply the scale factor, valid range, fill value because GDAL does not # do this. Also, GDAL reads the attributes as character values, so we # have to properly convert them. _FillValue = float(meta['_FillValue']) valid_range = [float(x) for x in meta['valid_range'].split(', ')] scale_factor = float(meta['scale_factor']) add_offset = float(meta['add_offset']) units = meta['units'] long_name = meta['long_name'] del gdset else: from pyhdf.SD import SD, SDC hdf = SD(FILE_NAME, SDC.READ) # Read dataset. data2D = hdf.select(DATAFIELD_NAME) data = data2D[:,:].astype(np.double) # Read geolocation dataset from HDF-EOS2 dumper output. GEO_FILE_NAME = 'lat_NPP_D16BRDF3_L3D.A2012241.h20v03.C1_03001.2012258151353.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lat = lat.reshape(data.shape) GEO_FILE_NAME = 'lon_NPP_D16BRDF3_L3D.A2012241.h20v03.C1_03001.2012258151353.output' GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'], GEO_FILE_NAME) lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0]) lon = lon.reshape(data.shape) # Read attributes attrs = data2D.attributes(full=1) lna=attrs["long_name"] long_name = lna[0] vra=attrs["valid_range"] valid_range = vra[0] aoa=attrs["add_offset"] add_offset = aoa[0] fva=attrs["_FillValue"] _FillValue = fva[0] sfa=attrs["scale_factor"] scale_factor = sfa[0] ua=attrs["units"] units = ua[0] invalid = np.logical_or(data < valid_range[0], data > valid_range[1]) invalid = np.logical_or(invalid, data == _FillValue) data[invalid] = np.nan data = data * scale_factor + add_offset data = np.ma.masked_array(data, np.isnan(data)) m = Basemap(projection='cyl', resolution='i', lon_0=-10, llcrnrlat=45, urcrnrlat = 65, llcrnrlon=25, urcrnrlon = 65) m.drawcoastlines(linewidth=0.5) m.drawparallels(np.arange(45, 61, 5), labels=[1, 0, 0, 0]) m.drawmeridians(np.arange(25, 56, 10), labels=[0, 0, 0, 1]) m.pcolormesh(lon, lat, data, latlon=True) cb=m.colorbar() cb.set_label(units) basename = os.path.basename(FILE_NAME) plt.title('{0}\n{1}'.format(basename, long_name)) fig = plt.gcf() # plt.show() pngfile = "{0}.py.png".format(basename) fig.savefig(pngfile)
triangles = np.asarray(trl) ntriangles = np.shape(triangles)[0] logging.debug("reindexing triangles...") for itr in range(ntriangles): for iv in range(3): triangles[itr, iv] = vid[triangles[itr, iv]] nnodes = np.shape(nodes)[0] nfacets = nfacets + ntriangles logging.debug("done reading %s, found %d nodes and %d triangles" % (surface_name, nnodes, ntriangles)) if args.proj != '': logging.info("projecting the nodes coordinates") xyzb = pyproj.transform(lla, myproj, nodes[:, 0], nodes[:, 1], 1e3 * nodes[:, 2], radians=False) nodes[:, 0] = xyzb[0] nodes[:, 1] = xyzb[1] nodes[:, 2] = xyzb[2] nodes[:, 0] = nodes[:, 0] + args.translate[0] nodes[:, 1] = nodes[:, 1] + args.translate[1] if args.tokm: nodes[:, :] = nodes[:, :] / 1e3 #compute efficiently the normals logging.debug("computing the normals") normal = np.cross( nodes[triangles[:, 1], :] - nodes[triangles[:, 0], :], nodes[triangles[:, 2], :] - nodes[triangles[:, 0], :])
x_lon_81 = fh3.variables['lon'][:].copy()  #x-coord (latlon)
y_lat_81 = fh3.variables['lat'][:].copy()  #y-coord (latlon)
#zs = fh2.variables['height'][:].copy() #height in m - is this surface elevation or SMB?
ts_81 = fh3.variables['time'][:].copy()
smb_81_raw = fh3.variables['gld'][:].copy()  #acc SMB in mm/day weq...need to convert
fh3.close()

print 'Now transforming coordinate system of SMB'
wgs84 = pyproj.Proj("+init=EPSG:4326")  # LatLon with WGS84 datum used by GPS units and Google Earth
psn_gl = pyproj.Proj("+init=epsg:3413")  # Polar Stereographic North used by BedMachine (as stated in NetCDF header)
xs, ys = pyproj.transform(wgs84, psn_gl, x_lon, y_lat)
xs_81, ys_81 = pyproj.transform(wgs84, psn_gl, x_lon_81, y_lat_81)
#Xs = xs[0:,] #flattening; note that x-dimension is 402 according to file header
#Ys = ys[:,0] #flattening; note that y-dimension is 602 according to file header

smb_init = smb_raw[0][0]
smb_latest = smb_raw[-1][0]
#smb_init_interpolated = interpolate.interp2d(ys, xs, smb_init, kind='linear')

Xmat, Ymat = np.meshgrid(X, Y)
regridded_smb_init = interpolate.griddata((xs.ravel(), ys.ravel()),
                                          smb_init.ravel(),
                                          (Xmat, Ymat), method='nearest')
regridded_smb_latest = interpolate.griddata((xs.ravel(), ys.ravel()),
                                            smb_latest.ravel(),
                                            (Xmat, Ymat), method='nearest')
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = '500m 16 days EVI'

    if USE_GDAL:
        import gdal

        GRID_NAME = 'MODIS_Grid_16DAY_500m_VI'
        gname = 'HDF4_EOS:EOS_GRID:"{0}":{1}:{2}'.format(FILE_NAME,
                                                         GRID_NAME,
                                                         DATAFIELD_NAME)
        gdset = gdal.Open(gname)
        data = gdset.ReadAsArray().astype(np.float64)

        # Construct the grid.
        x0, xinc, _, y0, _, yinc = gdset.GetGeoTransform()
        nx, ny = (gdset.RasterXSize, gdset.RasterYSize)
        x = np.linspace(x0, x0 + xinc * nx, nx)
        y = np.linspace(y0, y0 + yinc * ny, ny)
        xv, yv = np.meshgrid(x, y)

        # In basemap, the sinusoidal projection is global, so we won't use it.
        # Instead we'll convert the grid back to lat/lons.
        sinu = pyproj.Proj("+proj=sinu +R=6371007.181 +nadgrids=@null +wktext")
        wgs84 = pyproj.Proj("+init=EPSG:4326")
        lon, lat = pyproj.transform(sinu, wgs84, xv, yv)

        # Read the attributes.  (np.float is deprecated; use the builtin.)
        meta = gdset.GetMetadata()
        long_name = meta['long_name']
        units = meta['units']
        _FillValue = float(meta['_FillValue'])
        scale_factor = float(meta['scale_factor'])
        valid_range = [float(x) for x in meta['valid_range'].split(', ')]

        del gdset

    else:
        from pyhdf.SD import SD, SDC
        hdf = SD(FILE_NAME, SDC.READ)

        # Read dataset.
        data2D = hdf.select(DATAFIELD_NAME)
        data = data2D[:, :].astype(np.double)

        # Read geolocation dataset from HDF-EOS2 dumper output.
        GEO_FILE_NAME = 'lat_MOD13A1.A2007257.h09v05.005.2007277183254.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lat = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lat = lat.reshape(data.shape)

        GEO_FILE_NAME = 'lon_MOD13A1.A2007257.h09v05.005.2007277183254.output'
        GEO_FILE_NAME = os.path.join(os.environ['HDFEOS_ZOO_DIR'],
                                     GEO_FILE_NAME)
        lon = np.genfromtxt(GEO_FILE_NAME, delimiter=',', usecols=[0])
        lon = lon.reshape(data.shape)

        # Read attributes.
        attrs = data2D.attributes(full=1)
        long_name = attrs["long_name"][0]
        valid_range = attrs["valid_range"][0]
        _FillValue = attrs["_FillValue"][0]
        scale_factor = attrs["scale_factor"][0]
        units = attrs["units"][0]

    # Mask invalid values, then apply the scale factor.
    invalid = np.logical_or(data > valid_range[1], data < valid_range[0])
    invalid = np.logical_or(invalid, data == _FillValue)
    data[invalid] = np.nan
    data = data / scale_factor
    data = np.ma.masked_array(data, np.isnan(data))

    m = Basemap(projection='cyl', resolution='i',
                llcrnrlat=25, urcrnrlat=45,
                llcrnrlon=-120, urcrnrlon=-90)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(20, 50, 10), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-125, -75, 10), labels=[0, 0, 0, 1])

    # Subsample every other row to keep pcolormesh fast.
    m.pcolormesh(lon[::2], lat[::2], data[::2], latlon=True)
    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.py.png".format(basename)
    fig.savefig(pngfile)
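# The three-step masking above (valid-range test, fill-value test, NaN
# substitution) can also be written with numpy's masked-array helpers.  A
# sketch, assuming data, valid_range, _FillValue, and scale_factor as defined
# above (an equivalent alternative, not the original code path):
masked = np.ma.masked_outside(data, valid_range[0], valid_range[1])
masked = np.ma.masked_equal(masked, _FillValue)
masked = masked / scale_factor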
def run(FILE_NAME):

    # Identify the data field.
    DATAFIELD_NAME = 'SI_12km_SH_ICECON_DAY'

    from pyhdf.SD import SD, SDC
    hdf = SD(FILE_NAME, SDC.READ)

    # Read dataset.
    data2D = hdf.select(DATAFIELD_NAME)
    data = data2D[:, :].astype(np.float64)

    # There are multiple grids in this file, so a simple regular expression
    # search for UpperLeft/LowerRightPoint in StructMetadata.0 won't work.
    #
    # Use HDFView and look for the following parameters:
    #
    #   GROUP=GRID_2
    #   GridName="SpPolarGrid06km"
    #   XDim=1264
    #   YDim=1328
    #   UpperLeftPointMtrs=(-3950000.000000,4350000.000000)
    #   LowerRightMtrs=(3950000.000000,-3950000.000000)
    ny, nx = data.shape
    x1 = 3950000
    x0 = -3950000
    y0 = 4350000
    y1 = -3950000
    xinc = (x1 - x0) / nx
    yinc = (y1 - y0) / ny

    # Handle the land mask.
    # http://nsidc.org/data/docs/daac/ae_si12_12km_seaice/data.html
    data[data > 100.0] = np.nan
    data = np.ma.masked_array(data, np.isnan(data))

    # Construct the grid.
    # Reproject out of the GCTP stereographic projection into lat/lon.
    x = np.linspace(x0, x0 + xinc*nx, nx)
    y = np.linspace(y0, y0 + yinc*ny, ny)
    xv, yv = np.meshgrid(x, y)
    args = ["+proj=stere",
            "+lat_0=-90", "+lon_0=0", "+lat_ts=-70",
            "+k=1", "+es=0.006693883",
            "+a=6378273", "+x_0=0", "+y_0=0",
            "+ellps=WGS84", "+datum=WGS84"]
    pstereo = pyproj.Proj(' '.join(args))
    wgs84 = pyproj.Proj("+init=EPSG:4326")
    lon, lat = pyproj.transform(pstereo, wgs84, xv, yv)

    units = 'Percent'
    long_name = DATAFIELD_NAME

    m = Basemap(projection='spstere', resolution='l',
                boundinglat=-45, lon_0=0)
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-80, 0, 20), labels=[1, 0, 0, 0])
    m.drawmeridians(np.arange(-180, 181, 30), labels=[0, 0, 0, 1])
    m.pcolormesh(lon, lat, data, latlon=True)
    cb = m.colorbar()
    cb.set_label(units)

    basename = os.path.basename(FILE_NAME)
    plt.title('{0}\n{1}'.format(basename, long_name))
    fig = plt.gcf()
    # plt.show()
    pngfile = "{0}.s.py.png".format(basename)
    fig.savefig(pngfile)
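# Note that np.linspace(x0, x0 + xinc*nx, nx) spans the full corner-to-corner
# extent with nx points, so the effective spacing is xinc*nx/(nx-1) rather
# than xinc.  A sketch of an alternative that places coordinates at pixel
# centers instead (illustrative only; the grid construction above is the
# original):
x_centers = x0 + xinc * (np.arange(nx) + 0.5)
y_centers = y0 + yinc * (np.arange(ny) + 0.5)
xv_c, yv_c = np.meshgrid(x_centers, y_centers)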