def osr_ct_6():

    if gdaltest.have_proj4 == 0:
        return 'skip'

    ct = osr.CreateCoordinateTransformation(None, None)
    if ct is not None:
        return 'fail'

    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')

    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')

    ct = osr.CoordinateTransformation(ll_srs, utm_srs)
    if ct is None:
        return 'fail'

    result = ct.TransformPoints(((-117.5, 32.0, 0.0), (-117.5, 32.0)))

    for i in range(2):
        if abs(result[i][0] - 452772.06) > 0.01 \
           or abs(result[i][1] - 3540544.89) > 0.01 \
           or abs(result[i][2] - 0.0) > 0.01:
            gdaltest.post_reason('Wrong LL to UTM result')
            return 'fail'

    return 'success'
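# A hedged aside, not part of the original test: under GDAL 3 / PROJ 6,
# CreateCoordinateTransformation honours the authority axis order, so an
# EPSG:4326 SRS expects (lat, lon) input. The snippets in this section
# assume the traditional (lon, lat) order, which GDAL >= 3 can restore
# explicitly before the transformation is built. A minimal sketch:
def _lonlat_to_utm11_sketch(lon=-117.5, lat=32.0):
    from osgeo import osr

    ll_srs = osr.SpatialReference()
    ll_srs.SetWellKnownGeogCS('WGS84')
    # keep (lon, lat) ordering on GDAL >= 3; skipped on older bindings
    if hasattr(ll_srs, 'SetAxisMappingStrategy'):
        ll_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    utm_srs = osr.SpatialReference()
    utm_srs.SetUTM(11)
    utm_srs.SetWellKnownGeogCS('WGS84')

    ct = osr.CreateCoordinateTransformation(ll_srs, utm_srs)
    return ct.TransformPoint(lon, lat)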
def find_plots_intersect_boundingbox(bounding_box, all_plots, fullmac=True):
    """Take a list of plots from BETY and return only those overlapping the
    bounding box.

    fullmac -- only include full plots (omit KSU, omit E/W partial plots);
               note the filter is currently disabled (see the commented
               loop below)
    """
    bbox_poly = ogr.CreateGeometryFromJson(str(bounding_box))
    bb_sr = bbox_poly.GetSpatialReference()
    intersecting_plots = dict()

    # for plotname in all_plots:
    #     if fullmac and (plotname.find("KSU") > -1 or
    #                     plotname.endswith(" E") or plotname.endswith(" W")):
    #         continue
    for index, row in all_plots.iterrows():
        bounds = str(row.geometry)
        # bounds = all_plots[plotname]
        yaml_bounds = yaml.safe_load(bounds)
        # current_poly = ogr.CreateGeometryFromJson(json.dumps(yaml_bounds))
        current_poly = ogr.CreateGeometryFromWkt(yaml_bounds)

        # Check for a need to convert coordinate systems
        check_poly = current_poly
        if bb_sr:
            poly_sr = current_poly.GetSpatialReference()
            if poly_sr and not bb_sr.IsSame(poly_sr):
                # We need to convert to the same coordinate system before an
                # intersection
                check_poly = convert_geometry(current_poly, bb_sr)
                transform = osr.CreateCoordinateTransformation(poly_sr, bb_sr)
                new_poly = current_poly.Clone()
                if new_poly:
                    new_poly.Transform(transform)
                    check_poly = new_poly

        intersection_with_bounding_box = bbox_poly.Intersection(check_poly)

        if intersection_with_bounding_box is not None:
            intersection = json.loads(
                intersection_with_bounding_box.ExportToJson())
            if 'coordinates' in intersection and len(
                    intersection['coordinates']) > 0:
                intersecting_plots[row.ID] = bounds

    return intersecting_plots
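# A minimal usage sketch (file name and column layout are assumptions, not
# from the source): all_plots is expected to behave like a pandas/GeoPandas
# DataFrame with .iterrows(), a .geometry column holding WKT-convertible
# geometries, and an .ID column; bounding_box is a GeoJSON polygon.
if __name__ == '__main__':
    import geopandas as gpd  # assumed dependency for this sketch

    bbox_geojson = ('{"type": "Polygon", "coordinates": '
                    '[[[-111.975, 33.074], [-111.974, 33.074], '
                    '[-111.974, 33.075], [-111.975, 33.075], '
                    '[-111.975, 33.074]]]}')
    plots = gpd.read_file('bety_plots.shp')  # hypothetical input file
    hits = find_plots_intersect_boundingbox(bbox_geojson, plots)
    print('%d plots intersect the bounding box' % len(hits))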
def xLonyLat2PixelXY(self, xLonArr, yLatArr, imName):
    if self.dtSet is None:
        logging.error("image was not opened when converting coordinates")
        return None

    geoTrans = self.dtSet.GetGeoTransform()
    strProj = self.dtSet.GetProjection()
    srim = osr.SpatialReference()
    srim.ImportFromWkt(strProj)
    srgeo = srim.CloneGeogCS()
    coordtransform = osr.CreateCoordinateTransformation(srgeo, srim)
    xLonYLat = vstack((xLonArr, yLatArr))
    # print xLonYLat
    projXLonYLat = coordtransform.TransformPoints(xLonYLat.T)
    xyPix = xLonYLat.T.astype(int64)
    coord_PTs_ProjGeo2Pix(array(projXLonYLat), geoTrans, xyPix)
    # print numpy.array(projXLonYLat)
    return xyPix
def pixelXY2xLonyLat(self, xPixArr, yPixArr):
    if self.dtSet is None:
        logging.error("image was not opened when converting coordinates")
        return None

    geoTrans = self.dtSet.GetGeoTransform()
    xyPix = hstack((xPixArr.reshape(-1, 1), yPixArr.reshape(-1, 1)))
    projXLonYLat = xyPix.astype(float64)
    coord_PTs_Pix2ProjGeo(xyPix, geoTrans, projXLonYLat)
    strProj = self.dtSet.GetProjection()
    srim = osr.SpatialReference()
    srim.ImportFromWkt(strProj)
    srgeo = srim.CloneGeogCS()
    coordtransform = osr.CreateCoordinateTransformation(srim, srgeo)
    xLonYLat = zeros((projXLonYLat.shape[0], 3), dtype=float64)
    xLonYLat[:, :] = coordtransform.TransformPoints(projXLonYLat)
    return xLonYLat  # numpy.array(xLonYLat)
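# A hedged round-trip sketch (the enclosing class, which opens a GDAL
# dataset into self.dtSet, and the coord_PTs_Pix2ProjGeo /
# coord_PTs_ProjGeo2Pix pixel<->projected helpers come from the snippet's
# own module and are not redefined here). The two methods above should
# approximately invert each other:
#
#   conv = SomeRasterWrapper('scene.tif')   # hypothetical wrapper class
#   lonlat = conv.pixelXY2xLonyLat(array([10]), array([20]))
#   pix = conv.xLonyLat2PixelXY(lonlat[:, 0], lonlat[:, 1], 'scene.tif')
#   # pix should be close to [[10, 20]] up to integer truncation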
def extract_grid_ppt(sub_date_list,
                     in_net_cdf,
                     in_cat_shp,
                     sub_out_cat_data_df,
                     other_args,
                     ppt_que):
    '''
    Extract precipitation from a given list of netCDF files

    The catchments shapefile can have one or more catchment polygons.

    Change names of variables and values inside the function
    (not everything is specified through the arguments).
    '''
    cat_vec = ogr.Open(in_cat_shp)
    lyr = cat_vec.GetLayer(0)
    spt_ref = lyr.GetSpatialRef()
    trgt = osr.SpatialReference()
    trgt.ImportFromEPSG(other_args[1])
    tfm = osr.CreateCoordinateTransformation(spt_ref, trgt)
    back_tfm = osr.CreateCoordinateTransformation(trgt, spt_ref)

    #raise Exception

    feat_dict = {}
    feat_area_dict = {}
    cat_area_ratios_dict = {}
    cat_envel_dict = {}

    feat = lyr.GetNextFeature()
    while feat:
        geom = feat.GetGeometryRef()
        f_val = feat.GetFieldAsString(str(other_args[0]))
        if f_val is None:
            raise RuntimeError
        feat_area_dict[f_val] = geom.Area()  # do before transform
        geom.Transform(tfm)
        feat_dict[f_val] = feat
        cat_envel_dict[f_val] = geom.GetEnvelope()  # do after transform
        feat = lyr.GetNextFeature()

    #print 'Going through: %s' % netcdf
    in_nc = nc.Dataset(in_net_cdf)
    lat_arr = in_nc.variables[other_args[4]][:]
    lon_arr = in_nc.variables[other_args[3]][:]
    apply_cell_correc = other_args[6]

    # convert the netCDF time to regular time
    time_var = in_nc.variables[other_args[2]]
    time_arr = nc.num2date(in_nc.variables[other_args[2]][:],
                           time_var.units,
                           calendar=time_var.calendar)

    ppt_var = in_nc.variables[other_args[5]]

    #print 'Counting time from (in the netCDF file):', time_var.units
    #print 'Start date in the netCDF: ', time_arr[0]
    #print 'End date in the netCDF: ', time_arr[-1]
    #print 'Total time steps in the netCDF: ', time_arr.shape[0]

    cell_size = round(lon_arr[1] - lon_arr[0], 3)
    x_l_c = lon_arr[0]
    x_u_c = lon_arr[-1]
    y_l_c = lat_arr[0]
    y_u_c = lat_arr[-1]

    flip_lr = False
    flip_ud = False

    if x_l_c > x_u_c:
        x_l_c, x_u_c = x_u_c, x_l_c
        flip_lr = True

    if y_l_c > y_u_c:
        y_l_c, y_u_c = y_u_c, y_l_c
        flip_ud = True

    #raise Exception

    if apply_cell_correc:
        # CHIRPS has values at the center of the cell,
        # so shift the coordinates back by half a cell
        x_l_c -= (cell_size / 2.)
        x_u_c -= (cell_size / 2.)
        y_l_c -= (cell_size / 2.)
        y_u_c -= (cell_size / 2.)
    x_coords = np.arange(x_l_c, x_u_c * 1.00000001, cell_size)
    y_coords = np.arange(y_l_c, y_u_c * 1.00000001, cell_size)

    cat_x_idxs_dict = {}
    cat_y_idxs_dict = {}

    # print feat_dict.keys()
    for cat_no in feat_dict.keys():
        #print 'Cat no:', cat_no
        geom = feat_dict[cat_no].GetGeometryRef()
        extents = cat_envel_dict[cat_no]
        cat_area = feat_area_dict[cat_no]

        inter_areas = []

        x_low, x_hi, y_low, y_hi = extents

        # adjustment to get all cells intersecting the polygon
        x_low = x_low - cell_size
        x_hi = x_hi + cell_size
        y_low = y_low - cell_size
        y_hi = y_hi + cell_size

        x_cors_idxs = np.where(
            np.logical_and(x_coords >= x_low, x_coords <= x_hi))[0]
        y_cors_idxs = np.where(
            np.logical_and(y_coords >= y_low, y_coords <= y_hi))[0]

        x_cors = x_coords[x_cors_idxs]
        y_cors = y_coords[y_cors_idxs]

        cat_x_idxs = []
        cat_y_idxs = []

        for x_idx in range(x_cors.shape[0] - 1):
            for y_idx in range(y_cors.shape[0] - 1):
                ring = ogr.Geometry(ogr.wkbLinearRing)
                ring.AddPoint(x_cors[x_idx], y_cors[y_idx])
                ring.AddPoint(x_cors[x_idx + 1], y_cors[y_idx])
                ring.AddPoint(x_cors[x_idx + 1], y_cors[y_idx + 1])
                ring.AddPoint(x_cors[x_idx], y_cors[y_idx + 1])
                ring.AddPoint(x_cors[x_idx], y_cors[y_idx])

                poly = ogr.Geometry(ogr.wkbPolygon)
                poly.AddGeometry(ring)

                inter_poly = poly.Intersection(geom)

                # to get the area, convert it to the coordinate system of
                # the shapefile, which is hopefully in linear units
                inter_poly.Transform(back_tfm)
                inter_area = inter_poly.Area()

                inter_areas.append(inter_area)

                cat_x_idxs.append((x_cors[x_idx] - x_l_c) / cell_size)
                cat_y_idxs.append((y_cors[y_idx] - y_l_c) / cell_size)

        cat_area_ratios_dict[cat_no] = np.divide(inter_areas, cat_area)
        cat_x_idxs_dict[cat_no] = np.int64(np.round(cat_x_idxs, 6))
        cat_y_idxs_dict[cat_no] = np.int64(np.round(cat_y_idxs, 6))
        # print 'Normalized area sum:', np.sum(cat_area_ratios_dict[cat_no])

    # for cat_no in feat_dict.keys():
    #     print cat_no
    #     print cat_y_idxs_dict[cat_no]
    #     print cat_x_idxs_dict[cat_no]
    #     print '\n'

    for idx, date in enumerate(time_arr):
        if date in sub_out_cat_data_df.index:
            all_ppt_vals = ppt_var[idx]
            if flip_lr:
                all_ppt_vals = np.fliplr(all_ppt_vals)
            if flip_ud:
                all_ppt_vals = np.flipud(all_ppt_vals)

            for cat_no in feat_dict.keys():
                ppt_vals = all_ppt_vals[cat_y_idxs_dict[cat_no],
                                        cat_x_idxs_dict[cat_no]]
                fin_ppt_vals = np.multiply(ppt_vals,
                                           cat_area_ratios_dict[cat_no])
                sub_out_cat_data_df.loc[date][cat_no] = \
                    round(np.sum(fin_ppt_vals), 2)

    in_nc.close()
    ppt_que.put(sub_out_cat_data_df)
    cat_vec.Destroy()
    return
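# A minimal driver sketch for extract_grid_ppt (file names, the 'DN' field
# name and EPSG:4326 are assumptions, not from the source). The function
# writes its result into a queue, so it can run in a worker process:
if __name__ == '__main__':
    import multiprocessing as mp
    import pandas as pd

    dates = pd.date_range('2010-01-01', '2010-12-31', freq='D')
    out_df = pd.DataFrame(index=dates, columns=['1', '2'], dtype=float)
    # [field name, netCDF EPSG, time var, X var, Y var, ppt var, cell correction]
    other_args = ['DN', 4326, 'time', 'lon', 'lat', 'ppt', True]

    que = mp.Queue()
    proc = mp.Process(
        target=extract_grid_ppt,
        args=(dates, 'chirps_2010.nc', 'catchments.shp',
              out_df, other_args, que))
    proc.start()
    result_df = que.get()  # drain the queue before joining the worker
    proc.join()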
def extract_grid_ppt_shpfile(_,
                             in_net_cdf_files_list,
                             in_cat_shp_files,
                             sub_cat_out_ppt_data_df,
                             dict_for_gridded_data_,
                             _dict_for_coords_int_,
                             time_freq,
                             other_args):
    '''
    Extract precipitation from a given list of netCDF files

    The catchments shapefile can have one or more catchment polygons.

    # a list of some arguments
    # [field name to use as catchment names / numbers,
    #  netCDF EPSG code,
    #  netCDF time name,
    #  netCDF X coords name,
    #  netCDF Y coords name,
    #  netCDF variable to read,
    #  apply cell corner correction]
    other_args = ['DN', 32718, 'time', 'lat', 'long', 'ppt', True]
    '''
    for _, in_cat_shp_file in enumerate(in_cat_shp_files):
        cat_vec = ogr.Open(in_cat_shp_file)
        lyr = cat_vec.GetLayer(0)
        spt_ref = lyr.GetSpatialRef()
        # print(spt_ref)
        trgt = osr.SpatialReference()
        trgt.ImportFromEPSG(other_args[1])
        tfm = osr.CreateCoordinateTransformation(spt_ref, trgt)
        back_tfm = osr.CreateCoordinateTransformation(trgt, spt_ref)

        feat_dict = {}
        feat_area_dict = {}
        cat_area_ratios_dict = {}
        cat_envel_dict = {}

        feat = lyr.GetNextFeature()
        while feat:
            geom = feat.GetGeometryRef()
            f_val = feat.GetFieldAsString(str(other_args[0]))
            if f_val is None:
                raise RuntimeError
            feat_area_dict[f_val] = geom.Area()  # do before transform
            geom.Transform(tfm)
            feat_dict[f_val] = feat
            cat_envel_dict[f_val] = geom.GetEnvelope()  # do after transform
            feat = lyr.GetNextFeature()

        for in_net_cdf in in_net_cdf_files_list:
            print('Going through: %s' % in_net_cdf)
            in_nc = nc.Dataset(in_net_cdf)
            lat_arr = in_nc.variables[other_args[4]][:]
            lon_arr = in_nc.variables[other_args[3]][:]
            apply_cell_correc = other_args[6]

            # convert the netCDF time to regular time
            time_var = in_nc.variables[other_args[2]]
            time_arr = nc.num2date(in_nc.variables[other_args[2]][:],
                                   time_var.units,
                                   calendar='standard')  # time_var.calendar
            time_index = pd.date_range(start=time_arr[0],
                                       end=time_arr[-1],
                                       freq=time_freq)

            ppt_var = in_nc.variables[other_args[5]]

            print('Counting time from (in the netCDF file):', time_var.units)
            print('Start date in the netCDF: ', time_arr[0])
            print('End date in the netCDF: ', time_arr[-1])
            print('Total time steps in the netCDF: ', len(time_arr))

            cell_size = round(lon_arr[1] - lon_arr[0], 3)
            x_l_c = lon_arr[0]
            x_u_c = lon_arr[-1]
            y_l_c = lat_arr[0]
            y_u_c = lat_arr[-1]

            flip_lr = False
            flip_ud = False

            if x_l_c > x_u_c:
                x_l_c, x_u_c = x_u_c, x_l_c
                flip_lr = True

            if y_l_c > y_u_c:
                y_l_c, y_u_c = y_u_c, y_l_c
                flip_ud = True

            if apply_cell_correc:
                x_l_c -= (cell_size / 2.)
                x_u_c -= (cell_size / 2.)
                y_l_c -= (cell_size / 2.)
                y_u_c -= (cell_size / 2.)
            x_coords = np.arange(x_l_c, x_u_c * 1.00000001, cell_size)
            y_coords = np.arange(y_l_c, y_u_c * 1.00000001, cell_size)

            cat_x_idxs_dict = {}
            cat_y_idxs_dict = {}

            print(feat_dict.keys())
            for cat_no in feat_dict.keys():
                print('Cat no:', cat_no)
                geom = feat_dict[cat_no].GetGeometryRef()
                extents = cat_envel_dict[cat_no]
                cat_area = feat_area_dict[cat_no]

                inter_areas = []

                x_low, x_hi, y_low, y_hi = extents

                # adjustment to get all cells intersecting the polygon
                x_low = x_low - cell_size
                x_hi = x_hi + cell_size
                y_low = y_low - cell_size
                y_hi = y_hi + cell_size

                # print(x_coords, x_low, x_hi)
                x_cors_idxs = np.where(
                    np.logical_and(x_coords >= x_low, x_coords <= x_hi))[0]
                y_cors_idxs = np.where(
                    np.logical_and(y_coords >= y_low, y_coords <= y_hi))[0]

                x_cors = x_coords[x_cors_idxs]
                y_cors = y_coords[y_cors_idxs]
                # print(x_cors_idxs)

                cat_x_idxs = []
                cat_y_idxs = []

                for x_idx in range(x_cors.shape[0] - 1):
                    for y_idx in range(y_cors.shape[0] - 1):
                        ring = ogr.Geometry(ogr.wkbLinearRing)
                        ring.AddPoint(x_cors[x_idx], y_cors[y_idx])
                        ring.AddPoint(x_cors[x_idx + 1], y_cors[y_idx])
                        ring.AddPoint(x_cors[x_idx + 1], y_cors[y_idx + 1])
                        ring.AddPoint(x_cors[x_idx], y_cors[y_idx + 1])
                        ring.AddPoint(x_cors[x_idx], y_cors[y_idx])

                        poly = ogr.Geometry(ogr.wkbPolygon)
                        poly.AddGeometry(ring)

                        inter_poly = poly.Intersection(geom)

                        # to get the area, convert it to the coordinate sys
                        # of the shapefile, which is hopefully in linear units
                        inter_poly.Transform(back_tfm)
                        inter_area = inter_poly.Area()

                        inter_areas.append(inter_area)

                        cat_x_idxs.append((x_cors[x_idx] - x_l_c) / cell_size)
                        cat_y_idxs.append((y_cors[y_idx] - y_l_c) / cell_size)

                        _dict_for_coords_int_[cat_no].append(
                            (x_cors[x_idx], y_cors[y_idx]))

                cat_area_ratios_dict[cat_no] = np.divide(inter_areas, cat_area)
                cat_x_idxs_dict[cat_no] = np.int64(np.round(cat_x_idxs, 6))
                cat_y_idxs_dict[cat_no] = np.int64(np.round(cat_y_idxs, 6))

            for idx, date in enumerate(time_index):
                if date in sub_cat_out_ppt_data_df.index:
                    all_ppt_vals = np.array(ppt_var[idx].data, dtype='float64')
                    if flip_lr:
                        all_ppt_vals = np.fliplr(all_ppt_vals)
                    if flip_ud:
                        all_ppt_vals = np.flipud(all_ppt_vals)

                    for cat_no in feat_dict.keys():
                        ppt_vals = all_ppt_vals[cat_y_idxs_dict[cat_no],
                                                cat_x_idxs_dict[cat_no]]
                        fin_ppt_vals = np.multiply(
                            ppt_vals, cat_area_ratios_dict[cat_no])
                        sub_cat_out_ppt_data_df.loc[date][cat_no] = \
                            round(np.sum(fin_ppt_vals), 2)
                        dict_for_gridded_data_[cat_no][date].append(ppt_vals)
                    print('done getting data for', date)

            in_nc.close()

        cat_vec.Destroy()
        break

    return dict_for_gridded_data_, _dict_for_coords_int_
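# A small hedged sketch: the num2date call above overrides the file's
# calendar with 'standard'. When the calendar attribute may or may not be
# present in the netCDF, it can instead be read defensively (the getattr
# fallback is an assumption about the files, not from the source):
def _decode_nc_times(time_var):
    # netCDF attributes are exposed as Python attributes on the variable
    calendar = getattr(time_var, 'calendar', 'standard')
    return nc.num2date(time_var[:], time_var.units, calendar=calendar)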
out_temp_units = 'C'  # 273.15 is subtracted from all values
#out_time_units = 'ordinal_hr'
#out_lat_units = out_lon_units = 'degree'

# read the input shapefile to get the extents
in_shps = faizpy10_linux.list_full_path('.shp', in_shp_dir)

xmin, xmax, ymin, ymax = 999., -999., 999., -999.

for shp in in_shps:
    in_vec = ogr.Open(shp, 0)
    in_lyr = in_vec.GetLayer()
    ext = in_lyr.GetExtent()
    spt_ref = in_lyr.GetSpatialRef()
    trgt = osr.SpatialReference()
    trgt.ImportFromEPSG(nc_EPSG)
    tfm = osr.CreateCoordinateTransformation(spt_ref, trgt)
    xmin_ll, ymin_ll, z = tfm.TransformPoint(ext[0], ext[2])
    xmax_ul, ymax_ul, z = tfm.TransformPoint(ext[1], ext[3])
    in_vec.Destroy()

    if xmin_ll < xmin:
        xmin = xmin_ll
    if xmax_ul > xmax:
        xmax = xmax_ul
    if ymin_ll < ymin:
        ymin = ymin_ll
    if ymax_ul > ymax:
        ymax = ymax_ul
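# A hedged aside: transforming only two corners of an extent, as above, is
# exact for axis-aligned transforms but can under-cover the true bounds
# when the reprojection rotates or warps the rectangle. A sketch that
# transforms all four corners instead (same tfm as above):
def _reprojected_extent(ext, tfm):
    # ext is (xmin, xmax, ymin, ymax) as returned by GetExtent()
    corners = [(ext[0], ext[2]), (ext[0], ext[3]),
               (ext[1], ext[2]), (ext[1], ext[3])]
    pts = [tfm.TransformPoint(x, y) for x, y in corners]
    xs = [p[0] for p in pts]
    ys = [p[1] for p in pts]
    return min(xs), max(xs), min(ys), max(ys)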
def extract_grid_ppt(sub_netcdf_list, in_coords_df, sub_out_cat_data_df,
                     ppt_que):
    '''
    Extract precipitation from a given list of netCDF files

    - The catchments shapefile can have one or more catchment polygons.
    - Change names of variables and values inside the function
      (not everything is specified through the arguments).
    '''
    xs = in_coords_df[['X']].loc[sub_out_cat_data_df.columns].values
    ys = in_coords_df[['Y']].loc[sub_out_cat_data_df.columns].values

    spt_ref = osr.SpatialReference()
    spt_ref.ImportFromEPSG(32718)
    trgt = osr.SpatialReference()
    trgt.ImportFromEPSG(4326)
    tfm = osr.CreateCoordinateTransformation(spt_ref, trgt)

    xs_tfm = []
    ys_tfm = []
    for xy in zip(xs, ys):
        ppt_x, ppt_y = list(map(float, xy))
        ppt_tfm_pts = tfm.TransformPoint(ppt_x, ppt_y)
        #print 'Orig XY:', xy
        #print 'Trans XY:', ppt_tfm_pts
        xs_tfm.append(ppt_tfm_pts[0])
        ys_tfm.append(ppt_tfm_pts[1])

    for netcdf in sub_netcdf_list:
        #print 'Going through: %s' % netcdf
        in_nc = nc.Dataset(netcdf)
        lat_arr = in_nc.variables['latitude'][:]
        lon_arr = in_nc.variables['longitude'][:]

        # convert the netCDF time to regular time
        time_var = in_nc.variables['time']
        time_arr = nc.num2date(in_nc.variables['time'][:],
                               time_var.units,
                               calendar=time_var.calendar)

        ppt_var = in_nc.variables['precip']

        ppt_lat_idx_list = []
        ppt_lon_idx_list = []
        for xy in zip(xs_tfm, ys_tfm):
            lat_idx = np.argmin(np.abs(lat_arr - xy[1]))
            lon_idx = np.argmin(np.abs(lon_arr - xy[0]))
            ppt_lat_idx_list.append(lat_idx)
            ppt_lon_idx_list.append(lon_idx)

        for idx, date in enumerate(time_arr):
            if date in sub_out_cat_data_df.index:
                daily_ppt_grid = ppt_var[idx]
                stn_ppt_values = daily_ppt_grid[ppt_lat_idx_list,
                                                ppt_lon_idx_list]
                sub_out_cat_data_df.loc[date][
                    sub_out_cat_data_df.columns] = stn_ppt_values

        #print '\n\n'
        in_nc.close()

    ppt_que.put(sub_out_cat_data_df)
    return None
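# A minimal driver sketch (station IDs, coordinates and file names are
# assumptions): in_coords_df must carry 'X' and 'Y' columns in EPSG:32718,
# indexed by the same station IDs that form the output DataFrame's columns.
if __name__ == '__main__':
    import multiprocessing as mp
    import pandas as pd

    coords_df = pd.DataFrame({'X': [355000.0, 362500.0],
                              'Y': [8540000.0, 8551000.0]},
                             index=['stn_1', 'stn_2'])
    dates = pd.date_range('2005-01-01', '2005-12-31', freq='D')
    out_df = pd.DataFrame(index=dates, columns=coords_df.index, dtype=float)

    que = mp.Queue()
    extract_grid_ppt(['trmm_2005.nc'], coords_df, out_df, que)
    filled_df = que.get()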
def extract_points_in_culvert(culvert_excel_path,
                              shapefile_path,
                              out_shape_file_name,
                              tri_shape_file="tri.shp",
                              input_search_radius=50,
                              input_time_range=-1,
                              input_displacement_threshhold=0.5):
    search_radius = input_search_radius

    # #################### process input excel ################################
    culverts = get_culverts_from_excel(culvert_excel_path)

    # ######################## input shapefile #########################
    # message contains shapefile information
    message = ""

    # create input driver
    driver = ogr.GetDriverByName('ESRI Shapefile')

    # input data source
    input_data = driver.Open(shapefile_path)
    in_layer = input_data.GetLayer()
    message += "input shape file contains " + str(
        in_layer.GetFeatureCount()) + " features\n"

    # get current coordinate system
    source_spacial_reference = in_layer.GetSpatialRef()
    message += "------ get_points_from_shape_file ------\n"
    message += "the Spatial Reference of the shapes is: " + str(
        source_spacial_reference) + "\n"

    # new spatial reference in lat/long ---- geographic coordinate system
    lat_long_sr = osr.SpatialReference()
    # lat_long_sr.SetWellKnownGeogCS("EPSG:4326")
    lat_long_sr.ImportFromEPSG(4326)

    # Create a coordinate transformation
    # (from the projected coordinate system
    # "NAD_1983_StatePlane_Virginia_North_FIPS_4501_Feet"
    # to the geographic coordinate system WGS84/EPSG:4326)
    # projection_coordinate_transformation = osr.CreateCoordinateTransformation(source_spacial_reference, lat_long_sr)

    # from lat/long to feet in "NAD_1983_StatePlane_Virginia_North_FIPS_4501_Feet"
    lat_long_to_feet_transformation = osr.CreateCoordinateTransformation(
        lat_long_sr, source_spacial_reference)

    # ##################### create output shapefile #################################
    out_driver = ogr.GetDriverByName("ESRI Shapefile")
    out_shape_file = out_shape_file_name
    if __name__ == "__main__":
        print("out_shape_file", out_shape_file)

    # Remove output shape file if it already exists
    if os.path.exists(out_shape_file):
        out_driver.DeleteDataSource(out_shape_file)

    # Create the output shape file
    out_data_source = out_driver.CreateDataSource(out_shape_file)
    out_lyr_name = os.path.splitext(os.path.split(out_shape_file)[1])[0]
    out_layer = out_data_source.CreateLayer(out_lyr_name,
                                            srs=source_spacial_reference,
                                            geom_type=ogr.wkbPoint)

    # Add input layer fields to the output layer if it is the one we want
    in_layer_defn = in_layer.GetLayerDefn()
    for i in range(0, in_layer_defn.GetFieldCount()):
        out_field_defn = in_layer_defn.GetFieldDefn(i)
        out_layer.CreateField(out_field_defn)

    id_field = ogr.FieldDefn("FED_ID", ogr.OFTInteger)
    out_layer.CreateField(id_field)

    # ########## create shapefile for culvert surface triangles ##########
    tri_driver = ogr.GetDriverByName("ESRI Shapefile")
    if __name__ == "__main__":
        print("tri_shape_file", tri_shape_file)

    # Remove output shape file if it already exists
    if os.path.exists(tri_shape_file):
        tri_driver.DeleteDataSource(tri_shape_file)

    # Create the output shape file
    tri_data_source = tri_driver.CreateDataSource(tri_shape_file)
    tri_lyr_name = os.path.splitext(os.path.split(tri_shape_file)[1])[0]
    tri_layer = tri_data_source.CreateLayer(tri_lyr_name,
                                            srs=source_spacial_reference,
                                            geom_type=ogr.wkbPolygon)
    tri_layer.CreateField(id_field)
    area_change_field = ogr.FieldDefn("AreaChange", ogr.OFTReal)
    tri_layer.CreateField(area_change_field)

    # #################### Creating FeaturePoint objects #####################
    features = {}
    all_features = {}
    counter = 0
    feature_grouped_by_culvert_inner = {}
    feature_grouped_by_culvert_surrounding = {}

    for feature in in_layer:
        geo = feature.GetGeometryRef()
        if geo is None:
            continue
        # ignore DS points in output shapefile
        if feature["EFF_AREA"] != 0:
            continue
        new_point = (geo.GetX(), geo.GetY())
        features[new_point] = dict(HEIGHT=feature["HEIGHT"],
                                   VEL=feature["VEL"],
                                   ACC=feature["ACC"],
                                   V_STD=feature["V_STDEV"],
                                   Feature=feature)
        all_features[new_point] = feature
        counter += 1

    # ########################### Construct KDT ##############################
    all_points = np.array([list(i) for i in features.keys()])
    tree = spatial.KDTree(all_points)

    message += "------ End of get_points_from_shape_file ------\n"
    if SHOW_MESSAGE:
        print(message)

    # ###### Getting general statistics for points in culverts ######
    # avg_vel, avg_v_std, avg_displacement = data_summary(shapefile_path, "D20141124")

    # ########## Getting nearest points for culverts ##########
    counter = 0
    total_num_points_in_search_radius = 0
    num_culvert_has_point = 0

    # for each culvert in input file
    total_culvert_sum_vel = 0
    total_culvert_sum_v_var = 0
    total_outer_sum_vel = 0
    total_outer_sum_v_var = 0
    outer_num_data = 0
    change_percent_summary = []
    perm_p_value_summary = []

    for culvert in culverts:
        culvert_sum_vel = 0
        culvert_sum_v_var = 0

        # permutation test result on median velocity of culvert and
        # surrounding area:
        # -1: test cannot be conducted
        # positive number: the p-value of the permutation test result
        perm_result = -1

        # Transform the lat/long of the culvert into VA's feet coordinate
        # system
        pt = ogr.Geometry(ogr.wkbPoint)
        pt.AddPoint(culvert.lg, culvert.lat)
        pt.Transform(lat_long_to_feet_transformation)
        culvert.x = pt.GetX()
        culvert.y = pt.GetY()

        # perform a query with the input search radius in the KDT,
        # and another with twice the radius, for comparison
        inner_result = tree.query_ball_point([pt.GetX(), pt.GetY()],
                                             search_radius)
        outer_result = tree.query_ball_point(
            [pt.GetX(), pt.GetY()], search_radius * OUTER_RADIUS_RATIO)
        surrounding_result = list(set(outer_result) - set(inner_result))

        feature_grouped_by_culvert_inner[
            culvert.federal_structure_id] = all_points[inner_result]
        total_num_points_in_search_radius += len(inner_result)
        feature_grouped_by_culvert_surrounding[
            culvert.federal_structure_id] = all_points[surrounding_result]

        # get output layer's field definition
        out_layer_defn = out_layer.GetLayerDefn()

        # outer circle stats
        outer_sum_vel = 0
        outer_sum_var = 0
        if outer_result:
            for p in all_points[outer_result]:
                in_feature = features[tuple(p)]["Feature"]
                outer_sum_vel += in_feature["VEL"]
                outer_sum_var += in_feature["V_STDEV"] ** 2
            outer_num_data += len(outer_result)
            outer_avg_vel = outer_sum_vel / len(outer_result)
            outer_v_std = math.sqrt(outer_sum_var / len(outer_result))
            total_outer_sum_vel += outer_sum_vel
            total_outer_sum_v_var += outer_sum_var

        # conduct the permutation test on median velocity of culvert and
        # surrounding area only if there are enough data
        if len(surrounding_result) > 3 and len(inner_result) > 3:
            culvert_vel_list = []
            surrounding_vel_list = []
            for p in all_points[inner_result]:
                feature_i = features[tuple(p)]["Feature"]
                culvert_vel_list.append(feature_i["VEL"])
            for p in all_points[surrounding_result]:
                feature_j = features[tuple(p)]["Feature"]
                surrounding_vel_list.append(feature_j["VEL"])
            perm_result = exact_mc_perm_test(culvert_vel_list,
                                             surrounding_vel_list,
                                             PERM_NUM)
            culvert.perm_test_p_value = perm_result
        perm_p_value_summary.append(perm_result)

        # retrieve and store all feature points within the search radius
        # into the output layer
        if inner_result:
            num_culvert_has_point += 1
            features_in_culvert = {}
            for p in all_points[inner_result]:
                # add all InSAR feature points within the culvert radius
                # to the output shape file
                out_feature = ogr.Feature(out_layer_defn)
                in_feature = features[tuple(p)]["Feature"]
                features_in_culvert[tuple(p)] = in_feature
                out_feature.SetGeometry(in_feature.GetGeometryRef())
                for i in range(0, in_layer_defn.GetFieldCount()):
                    out_feature.SetField(
                        out_layer_defn.GetFieldDefn(i).GetNameRef(),
                        in_feature.GetField(i))
                out_feature["FED_ID"] = culvert.federal_structure_id
                out_layer.CreateFeature(out_feature)
                culvert_sum_vel += in_feature["VEL"]
                total_culvert_sum_vel += in_feature["VEL"]
                culvert_sum_v_var += in_feature["V_STDEV"] ** 2
                total_culvert_sum_v_var += in_feature["V_STDEV"] ** 2

            # ################ Triangulation analysis ################
            if len(inner_result) > 4:
                counter += 1
                change_percent, tri = \
                    triangulation_result_display(culvert=culvert,
                                                 inner_result=inner_result,
                                                 all_points=all_points,
                                                 features=features_in_culvert,
                                                 radius=input_search_radius,
                                                 plot_num=counter)
                change_percent_summary.append(change_percent)

                inner_points = all_points[inner_result]
                culvert_mean_location = inner_points.mean(axis=0)
                points = inner_points - culvert_mean_location

                tri_num = tri.simplices.shape[0]
                for i in range(0, tri_num):
                    # get the 3 vertices of each tri simplex
                    tri_i = tri.simplices[i, :]
                    p0 = points[tri_i[0]]
                    p1 = points[tri_i[1]]
                    p2 = points[tri_i[2]]
                    # restore to the original location
                    p0_ori = p0 + culvert_mean_location
                    p1_ori = p1 + culvert_mean_location
                    p2_ori = p2 + culvert_mean_location
                    ring = ogr.Geometry(ogr.wkbLinearRing)
                    ring.AddPoint(p0_ori[0], p0_ori[1])
                    ring.AddPoint(p1_ori[0], p1_ori[1])
                    ring.AddPoint(p2_ori[0], p2_ori[1])
                    # close the ring (absent in the original, but required
                    # for a valid polygon)
                    ring.AddPoint(p0_ori[0], p0_ori[1])
                    poly = ogr.Geometry(ogr.wkbPolygon)
                    poly.AddGeometry(ring)
                    tri_feature = ogr.Feature(tri_layer.GetLayerDefn())
                    tri_feature.SetGeometry(poly)
                    tri_feature["FED_ID"] = culvert.federal_structure_id
                    tri_feature["AreaChange"] = change_percent
                    tri_layer.CreateFeature(tri_feature)

                # update culvert attribute
                culvert.tri_result = change_percent

    out_data_source.Destroy()

    print("total number of points within radius ",
          total_num_points_in_search_radius)
    print("num of culverts: ", num_culvert_has_point)
    print("on average each culvert has ",
          total_num_points_in_search_radius / num_culvert_has_point,
          " points")

    plt.figure()
    plt.hist(change_percent_summary)
    plt.figure()
    plt.hist(perm_p_value_summary)
    plt.show()

    return {
        "all_points": all_points,
        "inner": feature_grouped_by_culvert_inner,
        "surrounding": feature_grouped_by_culvert_surrounding,
        "culverts": culverts,
        "radius": input_search_radius,
        "features": features
    }
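# A minimal invocation sketch (file names are assumptions; the function
# also relies on module-level helpers and constants from its own module:
# get_culverts_from_excel, exact_mc_perm_test, triangulation_result_display,
# SHOW_MESSAGE, OUTER_RADIUS_RATIO and PERM_NUM):
if __name__ == "__main__":
    result = extract_points_in_culvert(
        culvert_excel_path="culverts.xlsx",
        shapefile_path="insar_points.shp",
        out_shape_file_name="points_in_culverts.shp",
        input_search_radius=50)
    print("processed %d culverts" % len(result["culverts"]))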
dem = ds.ReadAsArray()
demInterp = interpolate.RectBivariateSpline(range(0, dem.shape[0]),
                                            range(0, dem.shape[1]), dem)
ds = None

# read in geo coords of GCPs (and height!)
ds = gdal.OpenEx(gcpGeoLocFile, gdal.OF_VECTOR)
if ds is None:
    print("Open failed.\n")

lyr = ds.GetLayerByIndex(0)
lyr.ResetReading()
gcpSpatialRef = lyr.GetSpatialRef()
gcpToDemTransform = osr.CreateCoordinateTransformation(gcpSpatialRef,
                                                       demSpatialRef)
#

# gcpList = []
gcpDict = {}
for (i, feat) in enumerate(lyr):
    if i > 190:
        break
    print('.', end='')
    feat_defn = lyr.GetLayerDefn()
    f = {}
    # use j here to avoid shadowing the enumerate counter i above
    for j in range(feat_defn.GetFieldCount()):
        field_defn = feat_defn.GetFieldDefn(j)
        f[field_defn.GetName()] = feat.GetField(j)
    geom = feat.GetGeometryRef()
    if geom is not None and (geom.GetGeometryType() == ogr.wkbPoint
                             or geom.GetGeometryType() == ogr.wkbPoint25D):
saveout = sys.stdout
fsock = open(hist_file_loc, 'w')
sys.stdout = fsock

print('\a\a\a\a Start \a\a\a\a\n')
start = timeit.default_timer()  # to get the runtime of the program
print('\a\a\a Main starting time is: ', dt.datetime.now(), '\n\n\n')

# read the ppt stations shapefile and create a
# coordinate transformation between the shapefile and the netCDF
in_ppt_ds = ogr.Open(in_ppt_shp)
ppt_lyr = in_ppt_ds.GetLayer()
ppt_spt_ref = ppt_lyr.GetSpatialRef()
ppt_trgt = osr.SpatialReference()
ppt_trgt.ImportFromEPSG(nc_EPSG)
ppt_tfm = osr.CreateCoordinateTransformation(ppt_spt_ref, ppt_trgt)

# read the temp stations shapefile and create a
# coordinate transformation between the shapefile and the netCDF
in_temp_ds = ogr.Open(in_temp_shp)
temp_lyr = in_temp_ds.GetLayer()
temp_spt_ref = temp_lyr.GetSpatialRef()
temp_trgt = osr.SpatialReference()
temp_trgt.ImportFromEPSG(nc_EPSG)
temp_tfm = osr.CreateCoordinateTransformation(temp_spt_ref, temp_trgt)

create_new_dir(out_csv_dir)  # create the output directory

# go through each file in the given input directory and
# perform the required actions