def __init__(self, src):
    """Initialize the shapefile reader.

    Resolves *src* to a shapefile path, loads its records, and — if a
    ``.prj`` sidecar file exists — builds a pyproj projection from it.

    Args:
        src: Path (str/unicode) to the shapefile source.

    Raises:
        ValueError: If the ``.prj`` file cannot be parsed as WKT.
    """
    if isinstance(src, unicode):
        src = src.encode('ascii', 'ignore')
    src = self.find_source(src)
    self.shpSrc = src
    self.sr = shapefile.Reader(src)
    self.recs = []
    self.shapes = {}
    self.load_records()
    self.proj = None
    # Check if there's a spatial reference (.prj sidecar next to the .shp)
    prj_src = src[:-4] + '.prj'
    if exists(prj_src):
        # Use a context manager so the file handle is always closed
        # (original left it open).
        with open(prj_src) as prj_fh:
            prj_text = prj_fh.read()
        srs = SpatialReference()
        # ImportFromWkt returns a non-zero error code on failure.
        if srs.ImportFromWkt(prj_text):
            # BUG FIX: original referenced undefined name 'prj_file' here,
            # which raised NameError instead of the intended ValueError.
            raise ValueError(
                "Error importing PRJ information from: %s" % prj_src)
        if srs.IsProjected():
            try:
                self.proj = pyproj.Proj(srs.ExportToProj4())
            except Exception:
                # e.g. ERROR 6: No translation for Lambert_Conformal_Conic
                # to PROJ.4 format is known. Retry after morphing the
                # ESRI-flavoured WKT into standard form.
                srs.MorphFromESRI()
                self.proj = pyproj.Proj(srs.ExportToProj4())
def __init__(self, src):
    """Initialize the shapefile reader.

    Resolves *src* to a shapefile path, loads its records, and — if a
    ``.prj`` sidecar file exists and describes a projected CRS — builds a
    pyproj projection from it; otherwise ``self.proj`` stays ``None``.

    Args:
        src: Path (str/unicode) to the shapefile source.

    Raises:
        ValueError: If the ``.prj`` file cannot be parsed as WKT.
    """
    if isinstance(src, unicode):
        src = src.encode('ascii', 'ignore')
    src = self.find_source(src)
    self.shpSrc = src
    self.sr = shapefile.Reader(src)
    self.recs = []
    # Tunables for downstream geometry processing — meanings are not
    # established by this block; presumably tolerances/factors used by
    # shape simplification elsewhere (TODO confirm against callers).
    self.intersect_tol = .3
    self.max_area_for_circle = .002
    self.high_exp_factor = 1.75
    self.shapes = {}
    self.geoms = {}
    self.load_records()
    self.proj = None
    # Check if there's a spatial reference (.prj sidecar next to the .shp)
    prj_src = src[:-4] + '.prj'
    if exists(prj_src):
        # Context manager ensures the handle is closed (original leaked it).
        with open(prj_src) as prj_fh:
            prj_text = prj_fh.read()
        srs = SpatialReference()
        # ImportFromWkt returns a non-zero error code on failure.
        wkt_ret = srs.ImportFromWkt(prj_text)
        if wkt_ret:
            # BUG FIX: original referenced undefined name 'prj_file' here,
            # which raised NameError instead of the intended ValueError.
            raise ValueError(
                "Error importing PRJ information from: %s" % prj_src)
        if srs.IsProjected():
            self.proj = pyproj.Proj(srs.ExportToProj4())
        else:
            # Geographic CRS: leave coordinates unprojected.
            self.proj = None
    # NOTE(review): removed leftover debug output (print 'choo') and
    # commented-out experiment code from the no-.prj branch.
def set_netcdf_metadata_attributes(
        self, to_crs='EPSG:4326', do_stats=False):
    '''
    Set all NetCDF metadata attributes using self.METADATA_MAPPING to map
    from NetCDF ACDD global attribute name to metadata path (e.g. xpath).

    Parameters:
        to_crs: EPSG code string (e.g. 'EPSG:4326') or WKT definition of
            the CRS in which geospatial metadata should be expressed.
            Falsy => use the dataset's native coordinates.
        do_stats: Boolean flag indicating whether minmax stats should be
            determined (slow)
    '''
    assert self.METADATA_MAPPING, 'No metadata mapping defined'
    assert self._netcdf_dataset, 'NetCDF output dataset not defined.'

    # --- Geospatial attributes ---
    # The (first) variable carrying a grid_mapping attribute names the CRS
    # variable of the dataset.
    try:
        grid_mapping = [
            variable.grid_mapping
            for variable in self._netcdf_dataset.variables.values()
            if hasattr(variable, 'grid_mapping')][0]
    except IndexError:
        # BUG FIX: was a bare "except:"; only the empty-list case is the
        # expected failure here.
        logger.error(
            'Unable to determine grid_mapping for spatial reference')
        raise

    crs = self._netcdf_dataset.variables[grid_mapping]
    spatial_ref = crs.spatial_ref
    # GDAL-style affine GeoTransform stored as a space-separated string.
    geoTransform = [float(string)
                    for string in crs.GeoTransform.strip().split(' ')]
    # NOTE(review): assumes exactly two dimensions (x, y) — the 2-tuple
    # unpacking fails otherwise; TODO confirm against dataset producers.
    xpixels, ypixels = (
        dimension.size
        for dimension in self._netcdf_dataset.dimensions.values())
    dimension_names = (
        dimension.name
        for dimension in self._netcdf_dataset.dimensions.values())

    # Nested list of bounding box corner coordinates in the native CRS.
    bbox_corners = [
        [geoTransform[0] + (x_pixel_offset * geoTransform[1]) +
         (y_pixel_offset * geoTransform[2]),
         geoTransform[3] + (x_pixel_offset * geoTransform[4]) +
         (y_pixel_offset * geoTransform[5])]
        for x_pixel_offset in [0, xpixels]
        for y_pixel_offset in [0, ypixels]]

    coord_trans = None  # set only when a reprojection is required
    if to_crs:  # Coordinate transformation required
        from_spatial_ref = SpatialReference()
        from_spatial_ref.ImportFromWkt(spatial_ref)
        to_spatial_ref = SpatialReference()
        # Check for EPSG then Well Known Text.
        # BUG FIX: regex pattern is now a raw string so '\d' is not an
        # (invalid) string escape.
        epsg_match = re.match(r'^EPSG:(\d+)$', to_crs)
        if epsg_match:
            to_spatial_ref.ImportFromEPSG(int(epsg_match.group(1)))
        else:  # Assume valid WKT definition
            to_spatial_ref.ImportFromWkt(to_crs)
        coord_trans = CoordinateTransformation(
            from_spatial_ref, to_spatial_ref)

        extents = np.array(
            [coord[0:2]
             for coord in coord_trans.TransformPoints(bbox_corners)])
        spatial_ref = to_spatial_ref.ExportToWkt()

        # Transform the corners of the centre pixel so its size can be
        # measured in the target CRS.
        centre_pixel_coords = [
            coord[0:2] for coord in coord_trans.TransformPoints(
                [[geoTransform[0] + (x_pixel_offset * geoTransform[1]) +
                  (y_pixel_offset * geoTransform[2]),
                  geoTransform[3] + (x_pixel_offset * geoTransform[4]) +
                  (y_pixel_offset * geoTransform[5])]
                 for x_pixel_offset in [xpixels // 2, xpixels // 2 + 1]
                 for y_pixel_offset in [ypixels // 2, ypixels // 2 + 1]])]
        # Use Pythagoras to compute centre pixel size in new coordinates
        # (never mind the angles)
        yres = pow(
            pow(centre_pixel_coords[1][0] - centre_pixel_coords[0][0], 2) +
            pow(centre_pixel_coords[1][1] - centre_pixel_coords[0][1], 2),
            0.5)
        xres = pow(
            pow(centre_pixel_coords[2][0] - centre_pixel_coords[0][0], 2) +
            pow(centre_pixel_coords[2][1] - centre_pixel_coords[0][1], 2),
            0.5)

        # TODO: Make this more robust - could pull single unit from WKT
        if to_spatial_ref.IsGeographic():
            xunits, yunits = ('degrees_east', 'degrees_north')
        elif to_spatial_ref.IsProjected():
            xunits, yunits = ('m', 'm')
        else:
            xunits, yunits = ('unknown', 'unknown')
    else:  # Use native coordinates
        extents = np.array(bbox_corners)
        xres = round(geoTransform[1], Geophys2NetCDF.DECIMAL_PLACES)
        yres = round(geoTransform[5], Geophys2NetCDF.DECIMAL_PLACES)
        xunits, yunits = (
            self._netcdf_dataset.variables[dimension_name].units
            for dimension_name in dimension_names)

    xmin = np.min(extents[:, 0])
    ymin = np.min(extents[:, 1])
    xmax = np.max(extents[:, 0])
    ymax = np.max(extents[:, 1])

    attribute_dict = dict(zip(['geospatial_lon_min', 'geospatial_lat_min',
                               'geospatial_lon_max', 'geospatial_lat_max'],
                              [xmin, ymin, xmax, ymax]))
    attribute_dict['geospatial_lon_resolution'] = xres
    attribute_dict['geospatial_lat_resolution'] = yres
    attribute_dict['geospatial_lon_units'] = xunits
    attribute_dict['geospatial_lat_units'] = yunits

    try:
        # Process dataset in pieces <= 2GB in size.
        hull_points = netcdf2convex_hull(self.netcdf_dataset, 2000000000)
        # BUG FIX: only transform when a transformation exists — the
        # original used coord_trans unconditionally, raising NameError in
        # the native-coordinates path (to_crs falsy).
        if coord_trans:
            hull_points = coord_trans.TransformPoints(hull_points)
        convex_hull = [coordinate[0:2] for coordinate in hull_points]
    except Exception:
        logger.info(
            'Unable to compute convex hull. Using rectangular bounding box instead.')
        ring = bbox_corners + [bbox_corners[0]]  # close the ring
        if coord_trans:
            ring = coord_trans.TransformPoints(ring)
        convex_hull = [coordinate[0:2] for coordinate in ring]

    attribute_dict['geospatial_bounds'] = 'POLYGON((' + ', '.join(
        [' '.join(['%.4f' % ordinate for ordinate in coordinates])
         for coordinates in convex_hull]) + '))'
    attribute_dict['geospatial_bounds_crs'] = spatial_ref

    for key, value in attribute_dict.items():
        setattr(self._netcdf_dataset, key, value)

    # Set attributes defined in self.METADATA_MAPPING
    # Scan list in reverse to give priority to earlier entries
    # TODO: Improve this coding - it's a bit crap
    keys_read = []
    for key, metadata_path in self.METADATA_MAPPING:
        # Skip any keys already read
        if key in keys_read:
            continue
        value = self.get_metadata(metadata_path)
        if value is not None:
            logger.debug('Setting %s to %s', key, value)
            # TODO: Check whether hierarchical metadata required
            setattr(self._netcdf_dataset, key, value)
            keys_read.append(key)
        else:
            logger.warning(
                'WARNING: Metadata path %s not found', metadata_path)

    unread_keys = sorted(
        list(set([item[0] for item in self.METADATA_MAPPING]) -
             set(keys_read)))
    if unread_keys:
        logger.warning(
            'WARNING: No value found for metadata attribute(s) %s' %
            ', '.join(unread_keys))

    # Ensure only one DOI is stored - could be multiple, comma-separated
    # entries
    if hasattr(self._netcdf_dataset, 'doi'):
        url_list = [url.strip()
                    for url in self._netcdf_dataset.doi.split(',')]
        doi_list = [url for url in url_list
                    if url.startswith('http://dx.doi.org/')]
        if len(url_list) > 1:  # If more than one URL in list
            try:  # Give preference to proper DOI URL
                url = doi_list[0]  # Use first (preferably only) DOI URL
            except IndexError:
                # BUG FIX: was a bare "except:"; empty doi_list is the
                # only expected failure here.
                url = url_list[0]  # Just use first URL if no DOI found
            # BUG FIX: original did url.replace('&', '&') — a no-op.
            # Presumed intent: unescape XML-encoded ampersands.
            url = url.replace('&amp;', '&')
            self._netcdf_dataset.doi = url

    # Set metadata_link to NCI metadata URL
    self._netcdf_dataset.metadata_link = (
        'https://pid.nci.org.au/dataset/%s' % self.uuid)
    self._netcdf_dataset.Conventions = 'CF-1.6, ACDD-1.3'

    if do_stats:
        datastats = DataStats(netcdf_dataset=self.netcdf_dataset,
                              netcdf_path=None,
                              max_bytes=2000000000)  # 2GB pieces
        datastats.data_variable.actual_range = np.array(
            [datastats.value('min'), datastats.value('max')],
            dtype='float32')

    # Remove old fields - remove this later
    if hasattr(self._netcdf_dataset, 'id'):
        del self._netcdf_dataset.id
    if hasattr(self._netcdf_dataset, 'ga_uuid'):
        del self._netcdf_dataset.ga_uuid
    if hasattr(self._netcdf_dataset, 'keywords_vocabulary'):
        del self._netcdf_dataset.keywords_vocabulary
def rough_convert_metres_to_spatial_ref_units(
        in_spatial_ref: osr.SpatialReference, extent: list,
        distance: float) -> float:
    """Convert from meters to the units of a given spatial_ref

    DO NOT USE THIS FOR ACCURATE DISTANCES. IT'S GOOD FOR A QUICK
    CALCULATION WHEN DISTANCE PRECISION ISN'T THAT IMPORTANT

    Args:
        in_spatial_ref (osr.SpatialReference): osr.SpatialRef to use as
            a reference
        extent (list): The extent of our dataset. NOTE(review): the
            indexing below reads x from extent[0]/extent[1] and y from
            extent[2]/extent[3], i.e. [min_x, max_x, min_y, max_y] —
            the original docstring claimed [min_x, min_y, max_x, max_y],
            which contradicts the code. We use this to find the
            centerpoint.
        distance (float): Distance in meters to convert

    Raises:
        VectorBaseException: If the input CRS is projected in units other
            than metres, or if the converted distance exceeds 360 (which
            indicates a projection problem).

    Returns:
        float: Distance in the spatial_ref's units
    """
    # If the input ref uses a projected coordinate system in meters then
    # simply return the distance. If it's projected but in some other
    # units then throw an exception. If it's in degrees then continue
    # with the code below to convert it to metres.
    if in_spatial_ref.IsProjected() == 1:
        # BUG FIX: GetAttrValue('unit') may return None, which previously
        # crashed with AttributeError on .lower(); now it falls through to
        # the explicit VectorBaseException.
        linear_unit = in_spatial_ref.GetAttrValue('unit')
        if linear_unit is not None and linear_unit.lower() in [
                'meter', 'metre', 'm'
        ]:
            return distance
        else:
            raise VectorBaseException(
                'Unhandled projected coordinate system linear units: {}'.
                format(in_spatial_ref.GetAttrValue('unit')))

    # Get the centroid of the Shapefile spatial extent
    extent_ring = ogr.Geometry(ogr.wkbLinearRing)
    extent_ring.AddPoint(extent[0], extent[2])
    extent_ring.AddPoint(extent[1], extent[2])
    extent_ring.AddPoint(extent[1], extent[3])
    extent_ring.AddPoint(extent[0], extent[3])
    extent_ring.AddPoint(extent[0], extent[2])
    extent_poly = ogr.Geometry(ogr.wkbPolygon)
    extent_poly.AddGeometry(extent_ring)
    extent_centroid = extent_poly.Centroid()

    # Go diagonally on the extent rectangle
    pt1_orig = Point(extent[0], extent[2])
    pt2_orig = Point(extent[1], extent[3])
    orig_dist = pt1_orig.distance(pt2_orig)

    # Determine the UTM zone by locating the centroid of the shapefile
    # extent. Then get the transformation required to convert to the
    # Shapefile to this UTM zone
    utm_epsg = get_utm_zone_epsg(extent_centroid.GetX())
    in_spatial_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    out_spatial_ref = osr.SpatialReference()
    out_spatial_ref.ImportFromEPSG(int(utm_epsg))
    out_spatial_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    VectorBase.log.debug(
        'Original spatial reference is : \n {0} (AxisMappingStrategy:{1})'
        .format(*VectorBase.get_srs_debug(in_spatial_ref)))
    VectorBase.log.debug(
        'Transform spatial reference is : \n {0} (AxisMappingStrategy:{1})'
        .format(*VectorBase.get_srs_debug(out_spatial_ref)))

    transform_forward = osr.CoordinateTransformation(
        in_spatial_ref, out_spatial_ref)

    # Project the diagonal into the UTM zone; the ratio of the original
    # (degree) diagonal to the projected (metre) diagonal gives a rough
    # metres-to-units scale factor.
    pt1_ogr = VectorBase.shapely2ogr(pt1_orig, transform_forward)
    pt2_ogr = VectorBase.shapely2ogr(pt2_orig, transform_forward)
    pt1_proj = VectorBase.ogr2shapely(pt1_ogr)
    pt2_proj = VectorBase.ogr2shapely(pt2_ogr)
    proj_dist = pt1_proj.distance(pt2_proj)

    output_distance = (orig_dist / proj_dist) * distance

    VectorBase.log.info(
        '{}m distance converts to {:.10f} using UTM EPSG {}'.format(
            distance, output_distance, utm_epsg))

    # A converted distance beyond 360 cannot be valid in degree units.
    if output_distance > 360:
        raise VectorBaseException(
            'Projection Error: \'{:,}\' is larger than the maximum allowed value'
            .format(output_distance))

    return output_distance