def join_csv2shp(shapefile, shp_joinfield, csvfile, csv_joinfield, out_shapefile, how='outer'):
    """Add attribute information from a csv file to a shapefile.

    Parameters
    ----------
    shapefile : str
        Shapefile to add attributes to.
    shp_joinfield : str
        Attribute name in shapefile on which to make the join.
    csvfile : str
        csv file with information to be added to the shapefile.
    csv_joinfield : str
        Column in csv with entries matching those in shp_joinfield.
    out_shapefile : str
        Output shapefile; the original shapefile is not modified.
    how : str
        pandas join type; see
        http://pandas.pydata.org/pandas-docs/dev/generated/pandas.DataFrame.join.html
    """
    shpdf = GISio.shp2df(shapefile, index=shp_joinfield, geometry=True)
    csvdf = pd.read_csv(csvfile, index_col=csv_joinfield)

    print('joining to {}...'.format(csvfile))
    # bug fix: join type was hard-coded to 'inner', silently ignoring the
    # caller's 'how' argument; honor the documented parameter instead
    joined = shpdf.join(csvdf, how=how, lsuffix='L', rsuffix='R')

    # write to shapefile, reusing the source projection file
    GISio.df2shp(joined, out_shapefile, 'geometry', shapefile[:-4] + '.prj')
def dissolve(inshp, outshp, dissolve_attribute=None):
    """Dissolve features in a shapefile on an attribute and write the result.

    inshp : input shapefile
    outshp : output shapefile containing the dissolved polygons
    dissolve_attribute : attribute on which to dissolve features
    """
    features = GISio.shp2df(inshp)
    dissolved = dissolve_df(features, dissolve_attribute)
    # write dissolved polygons to a new shapefile, reusing the source .prj
    GISio.df2shp(dissolved, outshp, prj=inshp[:-4] + '.prj')
def dissolve(inshp, outshp, dissolve_attribute=None):
    """Dissolve features in a shapefile on an attribute and write the result.

    inshp : input shapefile
    outshp : output shapefile containing the dissolved polygons
    dissolve_attribute : attribute on which to dissolve features
        (default None for consistency with the other dissolve() variants)
    """
    # bug fix: was GISio.shp2df(shp, ...) -- 'shp' is undefined and raised
    # a NameError; the input shapefile argument is 'inshp'
    df = GISio.shp2df(inshp, geometry=True)
    df_out = dissolve_df(df, dissolve_attribute)
    # write dissolved polygons to a new shapefile, reusing the source .prj
    GISio.df2shp(df_out, outshp, 'geometry', inshp[:-4] + '.prj')
def dissolve(inshp, outshp, dissolve_attribute=None):
    """Merge (dissolve) shapefile features sharing a common attribute value.

    inshp : path to the shapefile to dissolve
    outshp : path for the dissolved output shapefile
    dissolve_attribute : field used to group features before merging
    """
    source = GISio.shp2df(inshp)
    merged = dissolve_df(source, dissolve_attribute)
    # save output alongside the projection definition of the input
    prjfile = inshp[:-4] + '.prj'
    GISio.df2shp(merged, outshp, prj=prjfile)
def join_csv2shp(shapefile, shp_joinfield, csvfile, csv_joinfield, out_shapefile, how='outer'):
    """Add attribute information from a csv file to a shapefile.

    Parameters
    ----------
    shapefile : str
        Shapefile to add attributes to.
    shp_joinfield : str
        Attribute name in shapefile on which to make the join.
    csvfile : str
        csv file with information to be added to the shapefile.
    csv_joinfield : str
        Column in csv with entries matching those in shp_joinfield.
    out_shapefile : str
        Output shapefile; the original shapefile is not modified.
    how : str
        pandas join type; see
        http://pandas.pydata.org/pandas-docs/dev/generated/pandas.DataFrame.join.html
    """
    shpdf = GISio.shp2df(shapefile, index=shp_joinfield, geometry=True)
    csvdf = pd.read_csv(csvfile, index_col=csv_joinfield)

    print('joining to {}...'.format(csvfile))
    # bug fix: join type was hard-coded to 'inner', silently ignoring the
    # caller's 'how' argument; honor the documented parameter instead
    joined = shpdf.join(csvdf, how=how, lsuffix='L', rsuffix='R')

    # write to shapefile, reusing the source projection file
    GISio.df2shp(joined, out_shapefile, 'geometry', shapefile[:-4] + '.prj')
def write_shp(self, df, shpname='NWIS_export.shp'):
    """Write a shapefile of points from NWIS site file

    Parameters
    ----------
    df: dataframe
        dataframe of site info, must have dec_long_va and dec_lat_va
        columns with lon/lat in DD
    shpname: string
        Name for output shapefile

    Notes
    -----
    NAD83 is assumed for dec_long_va and dec_lat_va.
    If some entries are in NAD27, a difference of ~5 to >15m will result
    for WI (see
    http://en.wikipedia.org/wiki/North_American_Datum#/media/File:Datum_Shift_Between_NAD27_and_NAD83.png)
    """
    out = df.copy()
    # build point geometries from the lon/lat columns, preserving row order
    lons = out['dec_long_va']
    lats = out['dec_lat_va']
    out['geometry'] = [Point(lon, lat) for lon, lat in zip(lons, lats)]
    GISio.df2shp(out, shpname, epsg=4269)
def write_shp(self, df, shpname='NWIS_export.shp', **kwargs):
    """Write a shapefile of points from NWIS site file

    Parameters
    ----------
    df: dataframe
        dataframe of site info, must have dec_long_va and dec_lat_va
        columns with lon/lat in DD
    shpname: string
        Name for output shapefile
    kwargs: keyword arguments
        Passed through to GISio.df2shp (e.g. to override the CRS).

    Notes
    -----
    NAD83 is assumed for dec_long_va and dec_lat_va.
    If some entries are in NAD27, a difference of ~5 to >15m will result
    for WI (see
    http://en.wikipedia.org/wiki/North_American_Datum#/media/File:Datum_Shift_Between_NAD27_and_NAD83.png)
    """
    shpdf = df.copy()
    shpdf['geometry'] = [Point(r.dec_long_va, r.dec_lat_va)
                         for i, r in shpdf.iterrows()]
    # bug fix: **kwargs were accepted but silently dropped; forward them,
    # keeping NAD83 (epsg 4269) as the default CRS when not overridden
    kwargs.setdefault('epsg', 4269)
    GISio.df2shp(shpdf, shpname, **kwargs)
# Build the UZF-package IRUNBND array by mapping each model grid cell to an
# SFR segment via the catchment join, then write the array and shapefiles.
# NOTE: print statements converted to print() calls for consistency with the
# rest of the file (single-argument print() is valid in both Python 2 and 3).
print('building UZF package IRUNBND array from {}'.format(MFgrid))
catchments_shp = os.path.join(os.getcwd(), 'MFgrid_catchments.shp')
MFgrid_joined = GISio.shp2df(catchments_shp, geometry=True)
MFgrid_joined.index = MFgrid_joined.node
nrows, ncols = np.max(MFgrid_joined.row), np.max(MFgrid_joined.column)

# make new column of SFR segment for each grid cell (0 = no segment)
MFgrid_joined['segment'] = MFgrid_joined.FEATUREID.apply(
    segments_dict.get).fillna(0)

print('writing {}'.format(out_IRUNBND))
# should add code to allow for a dataframe that only includes a subset of
# model cells (could build a DF of zeros for each cellnum, and then merge
# with DF containing UZF cells, replacing the zeros for those cells)
IRUNBND = np.reshape(MFgrid_joined['segment'].sort_index().values,
                     (nrows, ncols))
np.savetxt(out_IRUNBND, IRUNBND, fmt='%i', delimiter=' ')

print('writing {}'.format(out_IRUNBND_shp))
# df, shpname, geo_column, prj; reuse the catchments projection file
prjfile = catchments_shp[:-4] + '.prj'
GISio.df2shp(MFgrid_joined,
             os.path.join(os.getcwd(), 'MFgrid_segments.shp'),
             'geometry', prjfile)
MFgrid_joined_dissolved = GISops.dissolve_df(MFgrid_joined, 'segment')
GISio.df2shp(MFgrid_joined_dissolved,
             os.path.join(os.getcwd(), 'MFgrid_segments_dissolved.shp'),
             'geometry', prjfile)
segments_dict[cmt] = segment # can also use values_count() to get a frequency table for segments (reaches) in each catchment print 'building UZF package IRUNBND array from {}'.format(MFgrid) MFgrid_joined = GISio.shp2df(os.path.join(os.getcwd(), 'MFgrid_catchments.shp'), geometry=True) MFgrid_joined.index = MFgrid_joined.node nrows, ncols = np.max(MFgrid_joined.row), np.max(MFgrid_joined.column) # make new column of SFR segment for each grid cell MFgrid_joined['segment'] = MFgrid_joined.FEATUREID.apply(segments_dict.get).fillna(0) print 'writing {}'.format(out_IRUNBND) # should add code to allow for a dataframe that only includes a subset of model cells # (could build a DF of zeros for each cellnum, and then merge with DF containing UZF cells, replacing the zeros for those cells IRUNBND = np.reshape(MFgrid_joined['segment'].sort_index().values, (nrows, ncols)) np.savetxt(out_IRUNBND, IRUNBND, fmt='%i', delimiter=' ') print 'writing {}'.format(out_IRUNBND_shp) #df, shpname, geo_column, prj GISio.df2shp(MFgrid_joined, os.path.join(os.getcwd(), 'MFgrid_segments.shp'), 'geometry', os.path.join(os.getcwd(), 'MFgrid_catchments.shp')[:-4]+'.prj') MFgrid_joined_dissolved = GISops.dissolve_df(MFgrid_joined, 'segment') GISio.df2shp(MFgrid_joined_dissolved, os.path.join(os.getcwd(), 'MFgrid_segments_dissolved.shp'), 'geometry', os.path.join(os.getcwd(), 'MFgrid_catchments.shp')[:-4]+'.prj')
break else: knt += 1 return columns, knt # read in NWIS site information and study area boundary header_text = open(NWIS_site_info_file).readlines() columns, header_rows = NWIS_header(header_text) df = pd.read_csv(NWIS_site_info_file, sep='\t', names=columns, skiprows=header_rows) bounds = GISio.shp2df(model_domain_polygon, geometry=True).geometry[0] # make geomtries for each station, and drop stations not in the study area df['geometry'] = df.apply(lambda x: Point(x['dec_long_va'], x['dec_lat_va']), axis=1) GISio.df2shp(df, 'D:/ATLData/GFL files/Great_Divide/flux_targets/NWIS_sites_all.shp', prj='epsg:4269') within = [p.within(bounds) for p in df.geometry] df = df[within] GISio.df2shp(df, NWIS_site_info_file[:-4]+'.shp', prj='epsg:4269') # now do spatial join of NWIS locations to NHD comids arcpy.SpatialJoin_analysis(NWIS_site_info_file[:-4]+'.shp', flowlines_clipped, NWIS_site_info_file[:-4]+'_joined.shp', "JOIN_ONE_TO_ONE", "KEEP_ALL", '', "WITHIN_A_DISTANCE", .001) # now read back in and make a csv file for input into flux_targets.py df = GISio.shp2df(NWIS_site_info_file[:-4]+'_joined.shp') site_info = df[['site_no', 'COMID']] # read in NWIS measurements
# read in NWIS site information and study area boundary
header_text = open(NWIS_site_info_file).readlines()
columns, header_rows = NWIS_header(header_text)
df = pd.read_csv(NWIS_site_info_file, sep='\t', names=columns,
                 skiprows=header_rows)
bounds = GISio.shp2df(model_domain_polygon, geometry=True).geometry[0]

# derive the output shapefile names from the site-info file once up front
sites_shp = NWIS_site_info_file[:-4] + '.shp'
joined_shp = NWIS_site_info_file[:-4] + '_joined.shp'

# make geometries for each station, and drop stations not in the study area
df['geometry'] = df.apply(lambda x: Point(x['dec_long_va'], x['dec_lat_va']),
                          axis=1)
GISio.df2shp(
    df,
    'D:/ATLData/GFL files/Great_Divide/flux_targets/NWIS_sites_all.shp',
    prj='epsg:4269')
in_area = [pt.within(bounds) for pt in df.geometry]
df = df[in_area]
GISio.df2shp(df, sites_shp, prj='epsg:4269')

# now do spatial join of NWIS locations to NHD comids
arcpy.SpatialJoin_analysis(sites_shp, flowlines_clipped, joined_shp,
                           "JOIN_ONE_TO_ONE", "KEEP_ALL", '',
                           "WITHIN_A_DISTANCE", .001)

# now read back in and make a csv file for input into flux_targets.py
df = GISio.shp2df(joined_shp)
# Assemble a point shapefile of cell-by-cell flow arrows for layer l.
logq = np.log10(q)
qs_1e6 = q * 1e6  # scale specific discharge to easier units for plotting
logqs_1e6 = np.log10(qs_1e6)

df = pd.DataFrame({'cellnum': cellnums.flatten(),
                   'x': xy[0].flatten(), 'y': xy[1].flatten(),
                   'U': U.flatten(), 'V': V.flatten(), 'Z': Z.flatten(),
                   'Q': Q.flatten(), 'logQ': logQ.flatten(),
                   'us': u.flatten(), 'vs': v.flatten(), 'zs': z.flatten(),
                   'qs_1e6': qs_1e6.flatten(),
                   'logqs_1e6': logqs_1e6.flatten(),
                   'rot': rot.flatten()})

# classify vertical flow direction; cells with zs == 0 stay None
# bug fix: .ix indexing was deprecated and removed from pandas -- use .loc
df['updown'] = None
df.loc[df['zs'] > 0, 'updown'] = 'down'
df.loc[df['zs'] < 0, 'updown'] = 'up'

# build point geometries from the x/y columns (same row order as before)
df['geometry'] = [Point(px, py) for px, py in zip(df['x'], df['y'])]
GISio.df2shp(df, os.path.join(path, 'cbb_arrows{}.shp'.format(l + 1)),
             'geometry', prj=PRJfile)