def LinearRegressionRawData(DataDirectory, DEM_prefix, basin_list=None, parallel=False):
    """
    Perform a linear regression on the raw slope-area data of each basin.

    Args:
        DataDirectory (str): the data directory
        DEM_prefix (str): the prefix of the DEM
        basin_list: list of basin keys to analyse; None or empty = all basins
        parallel (bool): if True, read the appended slope-area data produced
            by a parallel run instead of the single-run raw data

    Returns:
        pandas dataframe with one row per basin and columns
        ['basin_key', 'regression_slope', 'std_err', 'R2', 'p_value']

    Author: SMM and FJC
    """
    # read in the raw slope-area data
    if not parallel:
        df = Helper.ReadRawSAData(DataDirectory, DEM_prefix)
    else:
        df = Helper.AppendRawSAData(DataDirectory, DEM_prefix)

    # FIX: mutable default argument replaced with None. Passing [] still
    # behaves as before (analyse every basin in the dataset).
    if basin_list is None or len(basin_list) == 0:
        print("You didn't give me a basin list so I will analyse all the basins")
        basin_list = df['basin_key'].unique()

    # one regression per basin
    columns = ['basin_key', 'regression_slope', 'std_err', 'R2', 'p_value']
    OutDF = pd.DataFrame(columns=columns)
    for basin_key in basin_list:
        df_slope = (df[df['basin_key'] == basin_key]['slope']).values
        df_area = (df[df['basin_key'] == basin_key]['drainage_area']).values
        # mask out zero drainage areas before taking logs
        logS = np.log10(df_slope[df_area != 0])
        logA = np.log10(df_area[df_area != 0])
        slope, intercept, r_value, p_value, std_err = stats.linregress(logA, logS)
        #print("Slope: " +str(slope)+ " std_err: "+str(std_err)+ " R2 is: " + str(r_value**2) + " p value is: " + str(p_value))
        this_key = int(basin_key)
        # abs(slope): the S-A regression slope is reported as a magnitude
        this_row = [this_key, abs(slope), std_err, r_value**2, p_value]
        OutDF.loc[basin_key] = this_row

    return OutDF
def concavityCatcher(full_path, write_name, processed=False, disorder_based=False, basins_not_glaciated=None):
    """
    Return the basin keys and their median concavities.

    Args:
        full_path (str): directory containing the analysis output
        write_name (str): file prefix (a '/' is prepended internally)
        processed (bool): if True, restrict output to basins_not_glaciated
        disorder_based (bool): if True, use the disorder metric instead of
            the MC-points median m/n
        basins_not_glaciated (list): basin keys to keep when processed=True

    Returns:
        (basin_keys, concavities) as two lists
    """
    print("opened concavityCatcher")
    # FIX: mutable default argument replaced with None
    if basins_not_glaciated is None:
        basins_not_glaciated = []
    write_name = '/' + write_name

    if not disorder_based:
        # reading in the basin info and getting the m/n data
        BasinDF = Helper.ReadMCPointsCSV(full_path, write_name)
        PointsDF = MN.GetMOverNRangeMCPoints(BasinDF, start_movern=0.25, d_movern=0.05, n_movern=8)
        # extract basin key and concavity as lists
        basin_keys = [int(x) for x in PointsDF["basin_key"].tolist()]
        concavities = PointsDF["Median_MOverNs"].tolist()
        if not processed:
            return basin_keys, concavities
        # keep only the basins that were not glaciated
        processedDF = PointsDF[PointsDF.basin_key.isin(basins_not_glaciated)]
        basin_list = processedDF["basin_key"].tolist()
        concavity_list = processedDF["Median_MOverNs"].tolist()
        return basin_list, concavity_list

    # disorder-based route
    print("getting basin keys")
    infoDF = Helper.ReadBasinInfoCSV(full_path, write_name)
    basin_keys = [int(x) for x in infoDF["basin_key"].tolist()]
    print("got keys, getting disorder")
    disorder_concavity = getDisorderConcavity(full_path, write_name, basin_keys)
    print("concavities:")
    # FIX: was a Python 2 print statement (syntax error under Python 3)
    print(disorder_concavity)
    return basin_keys, disorder_concavity
def concavityCatcher(full_path, write_name, processed=False, basins_not_glaciated=None):
    """
    Return the basin keys and their median concavities from the MC-points data.

    Args:
        full_path (str): directory containing the analysis output
        write_name (str): file prefix (a '/' is prepended internally)
        processed (bool): if True, restrict output to basins_not_glaciated
        basins_not_glaciated (list): basin keys to keep when processed=True.
            FIX: this parameter was missing, so the processed branch raised
            NameError on the undefined ``processedDF``.

    Returns:
        (basin_keys, concavities) as two lists
    """
    if basins_not_glaciated is None:
        basins_not_glaciated = []
    write_name = '/' + write_name
    # reading in the basin info and getting the m/n data
    BasinDF = Helper.ReadMCPointsCSV(full_path, write_name)
    PointsDF = MN.GetMOverNRangeMCPoints(BasinDF, start_movern=0.1, d_movern=0.05, n_movern=18)
    # extract basin key and concavity as lists
    basin_keys = [int(x) for x in PointsDF["basin_key"].tolist()]
    concavities = PointsDF["Median_MOverNs"].tolist()
    if not processed:
        return basin_keys, concavities
    # FIX: build the filtered frame before using it (previously undefined)
    processedDF = PointsDF[PointsDF.basin_key.isin(basins_not_glaciated)]
    basin_list = processedDF["basin_key"].tolist()
    concavity_list = processedDF["Median_MOverNs"].tolist()
    return basin_list, concavity_list
def GetMultipleBasinOutlines(DataDirectory):
    """
    This function takes in multiple rasters of basins and gets a dict of basin
    polygons, where the key is the basin key derived from the file name and the
    value is a shapely polygon of the basin.

    IMPORTANT: In this case the "basin key" is usually the junction number:
    this function will use the raster values as keys and in general
    the basin rasters are output based on junction indices rather than keys

    Args:
        DataDirectory (str): the data directory with the basin raster

    Returns:
        dict mapping outlet junction (int) to shapely polygon of the basin

    Author: MDH
    """
    # get a list of basins and declare the dictionary to populate
    basin_dict = Helper.MapBasinsToKeys(DataDirectory)
    BasinsDict = {}

    # loop across the basins
    # FIX: dict.iteritems() does not exist in Python 3
    for outlet_jn, basin_key in basin_dict.items():
        this_fname = "basin" + str(outlet_jn) + "_AllBasins.bil"
        TempBasins = GetBasinOutlines(DataDirectory, this_fname)
        # FIX: iterate over a snapshot of the keys — the original popped from
        # the dict while iterating it, which raises RuntimeError.
        for temp_outlet in list(TempBasins.keys()):
            if len(TempBasins) > 1:
                print("WARNING: MULTIPLE BASINS IN basin #", outlet_jn)
            # re-key each polygon by the outlet junction number
            TempBasins[int(outlet_jn)] = TempBasins.pop(temp_outlet)
        BasinsDict.update(TempBasins)

    return BasinsDict
def concavityCatcher(full_path, write_name, processed=False, basins_not_glaciated=None):
    """
    Return the basin keys and their median concavities from the MC-points data.

    Args:
        full_path (str): directory containing the analysis output
        write_name (str): file prefix (a '/' is prepended internally)
        processed (bool): if True, filter to basins_not_glaciated and exit
            if the basin lists differ (NOTE(review): the processed branch has
            no return statement — looks unfinished; confirm intended behaviour)
        basins_not_glaciated (list): basin keys believed to be unglaciated

    Returns:
        (basin_keys, concavities) when processed is False, otherwise None
    """
    # FIX: mutable default argument replaced with None
    if basins_not_glaciated is None:
        basins_not_glaciated = []
    write_name = '/' + write_name
    # reading in the basin info and getting the m/n data
    BasinDF = Helper.ReadMCPointsCSV(full_path, write_name)
    PointsDF = MN.GetMOverNRangeMCPoints(BasinDF, start_movern=0.25, d_movern=0.05, n_movern=8)
    # extract basin key and concavity as lists
    basin_key = PointsDF["basin_key"].tolist()
    basin_keys = [int(x) for x in basin_key]
    concavities = PointsDF["Median_MOverNs"].tolist()
    if not processed:
        return basin_keys, concavities
    # restrict to the unglaciated basins
    processedDF = PointsDF[PointsDF.basin_key.isin(basins_not_glaciated)]
    print("this is the processed DF")
    # FIX: was a Python 2 print statement (syntax error under Python 3)
    print(processedDF)
    # bail out if the data does not cover exactly the requested basins
    if basin_key != basins_not_glaciated:
        sys.exit()
def DoesBasinInfoExist(DataDir,fname_prefix):
    """
    Look for an AllBasinsInfo csv file (written by the LSDTopoTools basin
    extraction routines in the chi_mapping_tool) and load it if present.

    Args:
        DataDir (str): The name of the data directory
        fname_prefix (str): The prefix of the raster file to be analysed

    Returns:
        BasinInfoDF (pandas dataframe): The basin info (empty if no file found)
        existing_basin_keys (int list): the basin keys (empty if no file found)

    Author: SMM

    Date 30/01/2017
    """
    print("Let me check if there is a basins info csv file.")
    BasinInfoFileName = DataDir + fname_prefix + "_AllBasinsInfo.csv"

    # defaults returned when no file is found
    DF = pd.DataFrame()
    existing_basin_keys = []

    if not os.path.isfile(BasinInfoFileName):
        print("I didn't find a basins info csv file. Check directory or filename.")
    else:
        print("There is a basins info csv file")
        DF = phelp.ReadBasinInfoCSV(DataDir, fname_prefix)
        existing_basin_keys = [int(k) for k in DF['basin_key']]

    return DF, existing_basin_keys
def LinearRegressionSegmentedData(DataDirectory, DEM_prefix, basin_list=None):
    """
    Perform a linear regression on each segment of the segmented SA data.

    Args:
        DataDirectory (str): the data directory
        DEM_prefix (str): the prefix of the DEM
        basin_list: list of basin keys to analyse; None or empty = all basins

    Returns:
        pandas dataframe with one row per segment and columns
        ['basin_key', 'segment_number', 'regression_slope', 'std_err',
         'R2', 'p_value']

    Author: FJC
    """
    df = Helper.ReadSegmentedSAData(DataDirectory, DEM_prefix)

    # FIX: mutable default argument replaced with None. Passing [] still
    # behaves as before (analyse every basin in the dataset).
    if basin_list is None or len(basin_list) == 0:
        print("You didn't give me a basin list so I will analyse all the basins")
        basin_list = df['basin_key'].unique()

    columns = [
        'basin_key', 'segment_number', 'regression_slope', 'std_err', 'R2',
        'p_value'
    ]
    OutDF = pd.DataFrame(columns=columns)
    counter = 0

    # regress each segment of each basin separately
    for basin_key in basin_list:
        print("THIS BASIN IS: " + str(basin_key))
        SegmentDF = df[df['basin_key'] == basin_key]
        segments = SegmentDF['segment_number'].unique()
        for segment_no in segments:
            print("Segment number is: " + str(segment_no))
            ThisDF = SegmentDF[SegmentDF['segment_number'] == segment_no]
            # regress the binned log data for this segment to get the best fit m/n
            median_log_S = ThisDF['median_log_S']
            median_log_A = ThisDF['median_log_A']
            slope, intercept, r_value, p_value, std_err = stats.linregress(
                median_log_A, median_log_S)
            print("Slope: " + str(slope) + " std_err: " + str(std_err) +
                  " R2 is: " + str(r_value**2) + " p value is: " +
                  str(p_value) + " intercept is: " + str(intercept))
            this_row = [
                int(basin_key), int(segment_no), slope, std_err, r_value**2,
                p_value
            ]
            OutDF.loc[counter] = this_row
            counter += 1

    return OutDF
def SelectTerracesFromShapefile(DataDirectory, shapefile_name, fname_prefix):
    """
    This function takes in a shapefile of digitised terraces and uses it
    to filter the terrace DF. Only pixels within each shapefile are kept,
    and they are assigned a new ID based on the ID of the shapefile polygons.

    Args:
        DataDirectory (str): the data directory
        shapefile_name (str): the name of the shapefile
        fname_prefix (str): prefix of the DEM

    Returns:
        terrace df filtered by the digitised terraces (also written to
        <fname_prefix>_terrace_info_shapefiles.csv)

    Author: FJC
    """
    # first get the terrace df
    terrace_df = H.read_terrace_csv(DataDirectory, fname_prefix)

    # now get the shapefile with the digitised terraces
    digitised_terraces = H.read_terrace_shapefile(DataDirectory, shapefile_name)

    # for each point in the df, check whether it falls in one of the polygons.
    # This will probably be slow.
    print("Filtering points by shapefile, this might take a while...")

    # FIX: DataFrame.append() was removed in pandas 2.0. Collect the kept
    # rows in a list and build the output frame once at the end (also avoids
    # the quadratic cost of appending row by row).
    kept_rows = []
    for idx, row in terrace_df.iterrows():
        this_point = Point(row['X'], row['Y'])
        # terrace_id renamed from 'id' to avoid shadowing the builtin
        for terrace_id, polygon in digitised_terraces.items():
            if polygon.contains(this_point):
                # this point is within this terrace: keep a copy with the
                # polygon's ID as its new TerraceID
                new_row = row.copy()
                new_row['TerraceID'] = terrace_id
                kept_rows.append(new_row)
    new_df = pd.DataFrame(kept_rows)

    OutDF_name = DataDirectory + fname_prefix + "_terrace_info_shapefiles.csv"
    new_df.to_csv(OutDF_name, index=False)
    return new_df
def SelectTerracePointsFromCentrelines(DataDirectory, shapefile_name, fname_prefix, distance=2):
    """
    This function takes in a shapefile of digitised terrace centrelines and
    finds points within a certain distance of the line. Returns as a df.

    Args:
        DataDirectory (str): the data directory
        shapefile_name (str): the name of the shapefile
        fname_prefix (str): prefix of the DEM
        distance (float): keep points within this distance of a centreline

    Returns:
        terrace df filtered by the digitised centrelines (also written to
        <fname_prefix>_terrace_info_centrelines.csv)

    Author: FJC
    """
    # first get the terrace df
    terrace_df = H.read_terrace_csv(DataDirectory, fname_prefix)

    # now get the shapefile with the digitised centrelines
    centrelines = H.read_terrace_centrelines(DataDirectory, shapefile_name)

    # for each point in the df, check its distance to each centreline.
    # This will probably be slow.
    print("Filtering points by shapefile, this might take a while...")

    # FIX: DataFrame.append() was removed in pandas 2.0. Collect the kept
    # rows in a list and build the output frame once at the end (also avoids
    # the quadratic cost of appending row by row).
    kept_rows = []
    for idx, row in terrace_df.iterrows():
        this_point = Point(row['X'], row['Y'])
        # line_id renamed from 'id' to avoid shadowing the builtin
        for line_id, line in centrelines.items():
            if line.distance(this_point) < distance:
                # this point is near this centreline: keep a copy with the
                # centreline's ID as its new TerraceID
                new_row = row.copy()
                new_row['TerraceID'] = line_id
                kept_rows.append(new_row)
    new_df = pd.DataFrame(kept_rows)

    OutDF_name = DataDirectory + fname_prefix + "_terrace_info_centrelines.csv"
    new_df.to_csv(OutDF_name, index=False)
    return new_df
def PlotBasinPerimeter(DataDirectory, fname_prefix, size_format='ESURF', FigFormat='png'):
    """
    Plot the elevation of the basin perimeter nodes, ordered from the outlet.

    Args:
        DataDirectory (str): the data directory
        fname_prefix (str): filename of the DEM without extension
        size_format (str): Can be "big" (16 inches wide), "geomorphology"
            (6.25 inches wide), or "ESURF" (4.92 inches wide) (defualt esurf).
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'.
            If "show" then it calls the matplotlib show() command.

    Author: FJC
    """
    # make sure the output directory for perimeter plots exists
    perimeter_dir = DataDirectory + 'basin_perimeters/'
    if not os.path.isdir(perimeter_dir):
        os.makedirs(perimeter_dir)

    # plot fonts
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = 10

    # figure size depends on the requested format
    if size_format == "geomorphology":
        fig = plt.figure(1, facecolor='white', figsize=(6.25, 3.5))
    elif size_format == "big":
        fig = plt.figure(1, facecolor='white', figsize=(16, 9))
    else:
        fig = plt.figure(1, facecolor='white', figsize=(4.92126, 3.2))

    PerimeterDF = Helper.ReadPerimeterCSV(DataDirectory, fname_prefix)

    # lay out the axes and plot perimeter elevation against node order
    gs = plt.GridSpec(100, 100, bottom=0.15, left=0.05, right=0.95, top=0.95)
    ax = fig.add_subplot(gs[5:100, 10:95])
    ax.plot(PerimeterDF['node_key'], PerimeterDF['elevation'])
    ax.set_xlabel('Perimeter node ordered from outlet')
    ax.set_ylabel('Node elevation')

    # write the figure and clean up
    out_fname = perimeter_dir + fname_prefix + "_basin_perimeter." + FigFormat
    plt.savefig(out_fname, format=FigFormat, dpi=300)
    ax.cla()
    plt.close(fig)
def basinsWithDesiredConcavity(full_path, write_name, concavity):
    """
    Return the keys of the basins whose median m/n equals the given concavity.
    """
    # reading in the basin info (prefix needs a leading separator)
    write_name = '/' + write_name
    BasinDF = Helper.ReadMCPointsCSV(full_path, write_name)

    # getting the m/n data
    PointsDF = MN.GetMOverNRangeMCPoints(BasinDF,
                                         start_movern=0.1,
                                         d_movern=0.05,
                                         n_movern=18)

    # keep only the basins whose median concavity matches exactly
    matching = PointsDF[PointsDF["Median_MOverNs"] == concavity]
    return matching["basin_key"].tolist()
def concavityCatcher(full_path,write_name):
    """
    Return the basin keys and their median concavities.

    NOTE(review): this variant prepends a backslash to write_name, unlike the
    other variants which use '/' — presumably for Windows paths; confirm.
    """
    write_name = '\\'+write_name

    # reading in the basin info
    BasinDF = Helper.ReadMCPointsCSV(full_path,write_name)

    # getting the m/n data
    PointsDF = MN.GetMOverNRangeMCPoints(BasinDF,start_movern=0.25,d_movern=0.05,n_movern=8)

    # extract basin keys (as ints) and concavities as plain lists
    basin_keys = [int(k) for k in PointsDF["basin_key"].tolist()]
    concavities = PointsDF["Median_MOverNs"].tolist()
    return basin_keys,concavities
def write_dip_and_dipdir_to_csv(DataDirectory, fname_prefix, digitised_terraces=False, shapefile_name=None, min_size=5000):
    """
    Wrapper for dip and dipdir function

    Args:
        DataDirectory (str): the data directory
        fname_prefix (str): name of the DEM
        digitised_terraces (bool): boolean to use digitised terrace shapefile
        shapefile_name (str): name of shapefile
        min_size: minimum terrace size passed to filter_terraces when not
            using digitised terraces. FIX: min_size was previously referenced
            but never defined, so the non-digitised branch raised NameError.
            Default of 5000 chosen by the reviewer — TODO confirm units/value.

    Author: FJC
    """
    # read in the terrace csv
    terraces = H.read_terrace_csv(DataDirectory, fname_prefix)

    if digitised_terraces:
        # check if you've already done the selection, if so just read in the csv
        shp_csv = DataDirectory + fname_prefix + '_terrace_info_shapefiles.csv'
        print("File name is", shp_csv)
        if os.path.isfile(shp_csv):
            terraces = pd.read_csv(shp_csv)
        else:
            terraces = SelectTerracesFromShapefile(DataDirectory, shapefile_name, fname_prefix)
    else:
        # NOTE(review): the result of filter_terraces is discarded, as in the
        # original — presumably it filters in place; verify.
        filter_terraces(terraces, min_size)

    # get the terrace dip and dip dirs
    terrace_dips = get_terrace_dip_and_dipdir(terraces)

    # write to csv
    terrace_dips.to_csv(DataDirectory + fname_prefix + '_Dip_DipDirection.csv')
def PrintBasins(DataDirectory, fname_prefix, add_basin_labels=True, cmap="jet", cbar_loc="right", size_format="ESURF", fig_format="png", dpi=250, out_fname_prefix=""): """ This function makes a shaded relief plot of the DEM with the basins coloured by the basin ID. IMPORTANT: To get this to run you need to set the flags in chi mapping tool to: write_hillshade: true print_basin_raster: true print_chi_data_maps: true Args: DataDirectory (str): the data directory with the m/n csv files fname_prefix (str): The prefix for the m/n csv files add_basin_labels (bool): If true, label the basins with text. Otherwise use a colourbar. cmap (str or colourmap): The colourmap to use for the plot cbar_lox (str): where you want the colourbar. Options are none, left, right, top and botton. The colourbar will be of the elevation. If you want only a hillshade set to none and the cmap to "gray" size_format (str): Either geomorphology or big. Anything else gets you a 4.9 inch wide figure (standard ESURF size) fig_format (str): An image format. png, pdf, eps, svg all valid dpi (int): The dots per inch of the figure out_fname_prefix (str): The prefix of the image file. If blank uses the fname_prefix Returns: Shaded relief plot with the basins coloured by basin ID. Uses a colourbar to show each basin Author: FJC, SMM """ #import modules from LSDMapFigure.PlottingRaster import MapFigure # set figure sizes based on format if size_format == "geomorphology": fig_width_inches = 6.25 elif size_format == "big": fig_width_inches = 16 else: fig_width_inches = 4.92126 # get the basin IDs to make a discrete colourmap for each ID BasinInfoDF = PlotHelp.ReadBasinInfoCSV(DataDirectory, fname_prefix) basin_keys = list(BasinInfoDF['basin_key']) basin_keys = [int(x) for x in basin_keys] basin_junctions = list(BasinInfoDF['outlet_junction']) basin_junctions = [float(x) for x in basin_junctions] print('Basin keys are: ') print(basin_keys) # going to make the basin plots - need to have bil extensions. 
print( "I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!" ) # get the rasters raster_ext = '.bil' #BackgroundRasterName = fname_prefix+raster_ext HillshadeName = fname_prefix + '_hs' + raster_ext BasinsName = fname_prefix + '_AllBasins' + raster_ext print(BasinsName) Basins = LSDP.GetBasinOutlines(DataDirectory, BasinsName) # If wanted, add the labels if add_basin_labels: print("I am going to add basin labels, there will be no colourbar.") MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location="None") MF.plot_polygon_outlines(Basins, linewidth=0.8) MF.add_drape_image(BasinsName, DataDirectory, colourmap=cmap, alpha=0.8, colorbarlabel='Basin ID', discrete_cmap=True, n_colours=len(basin_keys), show_colourbar=False, modify_raster_values=True, old_values=basin_junctions, new_values=basin_keys, cbar_type=int) # This is used to label the basins label_dict = dict(zip(basin_junctions, basin_keys)) # this dict has the basin junction as the key and the basin_key as the value Points = LSDP.GetPointWithinBasins(DataDirectory, BasinsName) MF.add_text_annotation_from_shapely_points(Points, text_colour='k', label_dict=label_dict) else: print("I am showing the basins without text labels.") MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location=cbar_loc) MF.plot_polygon_outlines(Basins, linewidth=0.8) MF.add_drape_image(BasinsName, DataDirectory, colourmap=cmap, alpha=0.8, colorbarlabel='Basin ID', discrete_cmap=True, n_colours=len(basin_keys), show_colourbar=True, modify_raster_values=True, old_values=basin_junctions, new_values=basin_keys, cbar_type=int) # Save the image if len(out_fname_prefix) == 0: ImageName = DataDirectory + fname_prefix + "_basins." + fig_format else: ImageName = DataDirectory + out_fname_prefix + "_basins." + fig_format MF.save_fig(fig_width_inches=fig_width_inches, FigFileName=ImageName, FigFormat=fig_format, Fig_dpi=dpi) # Save the figure
def PrintChannelsAndBasins(DataDirectory, fname_prefix, add_basin_labels=True, cmap="jet", cbar_loc="right", size_format="ESURF", fig_format="png", dpi=250, out_fname_prefix=""):
    """
    This function prints a channel map over a hillshade.

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files
        add_basin_labels (bool): If true, label the basins with text. Otherwise use a colourbar.
        cmap (str or colourmap): The colourmap to use for the plot
        cbar_lox (str): where you want the colourbar. Options are none, left, right, top and botton. The colourbar will be of the elevation.
                        If you want only a hillshade set to none and the cmap to "gray"
        size_format (str): Either geomorphology or big. Anything else gets you a 4.9 inch wide figure (standard ESURF size)
        fig_format (str): An image format. png, pdf, eps, svg all valid
        dpi (int): The dots per inch of the figure
        out_fname_prefix (str): The prefix of the image file. If blank uses the fname_prefix

    Returns:
        Shaded relief plot with the basins coloured by basin ID. Uses a colourbar to show each basin

    Author: SMM
    """
    # specify the figure size and format
    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_size_inches = 6.25
    elif size_format == "big":
        fig_size_inches = 16
    else:
        fig_size_inches = 4.92126
    ax_style = "Normal"

    # get the basin IDs to make a discrete colourmap for each ID
    BasinInfoDF = PlotHelp.ReadBasinInfoCSV(DataDirectory, fname_prefix)

    basin_keys = list(BasinInfoDF['basin_key'])
    basin_keys = [int(x) for x in basin_keys]

    # junctions are kept as floats because they are used as raster values below
    basin_junctions = list(BasinInfoDF['outlet_junction'])
    basin_junctions = [float(x) for x in basin_junctions]

    print('Basin keys are: ')
    print(basin_keys)

    # going to make the basin plots - need to have bil extensions.
    print(
        "I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!"
    )

    # get the rasters
    raster_ext = '.bil'
    #BackgroundRasterName = fname_prefix+raster_ext
    HillshadeName = fname_prefix + '_hs' + raster_ext
    BasinsName = fname_prefix + '_AllBasins' + raster_ext
    print(BasinsName)
    Basins = LSDP.GetBasinOutlines(DataDirectory, BasinsName)

    # the chi data map csv holds the channel points to overlay
    ChannelFileName = fname_prefix + "_chi_data_map.csv"
    chi_csv_fname = DataDirectory + ChannelFileName
    thisPointData = LSDMap_PD.LSDMap_PointData(chi_csv_fname)

    # clear the plot
    plt.clf()

    # set up the base image and the map
    print("I am showing the basins without text labels.")
    MF = MapFigure(HillshadeName,
                   DataDirectory,
                   coord_type="UTM_km",
                   colourbar_location="None")
    MF.plot_polygon_outlines(Basins, linewidth=0.8)
    # drape the basins raster faintly (alpha=0.1) so channels stay visible,
    # remapping junction numbers to basin keys
    MF.add_drape_image(BasinsName,
                       DataDirectory,
                       colourmap=cmap,
                       alpha=0.1,
                       discrete_cmap=False,
                       n_colours=len(basin_keys),
                       show_colourbar=False,
                       modify_raster_values=True,
                       old_values=basin_junctions,
                       new_values=basin_keys,
                       cbar_type=int)
    # channel points scaled by drainage area (log scale)
    MF.add_point_data(thisPointData,
                      column_for_plotting="basin_key",
                      scale_points=True,
                      column_for_scaling="drainage_area",
                      this_colourmap=cmap,
                      scaled_data_in_log=True,
                      max_point_size=3,
                      min_point_size=1)

    # Save the image
    if len(out_fname_prefix) == 0:
        ImageName = DataDirectory + fname_prefix + "_channels_with_basins." + fig_format
    else:
        ImageName = DataDirectory + out_fname_prefix + "_channels_with_basins." + fig_format

    MF.save_fig(fig_width_inches=fig_size_inches,
                FigFileName=ImageName,
                axis_style=ax_style,
                FigFormat=fig_format,
                Fig_dpi=dpi)
def long_profiler_centrelines(DataDirectory, fname_prefix, shapefile_name, colour_by_ksn=False, ages="", FigFormat='png'):
    """
    Function takes in the csv file of terrace centreline data and plots as a
    long profile against the baseline channel.

    Args:
        DataDirectory (str): the data directory
        fname_prefix (str): prefix of the DEM
        shapefile_name (str): shapefile of terrace centrelines, used only if
            the centrelines csv does not already exist
        colour_by_ksn (bool): if True, colour the mainstem by m_chi (ksn)
        ages (str): optional csv filename of dated points to overlay
            (assumed to have 'upstream_dist' and 'elevation' columns)
        FigFormat (str): output image format

    Author: FJC
    """
    # check if a directory exists for the chi plots. If not then make it.
    T_directory = DataDirectory + 'terrace_plots/'
    if not os.path.isdir(T_directory):
        os.makedirs(T_directory)

    # make a figure
    fig = CreateFigure()
    ax = plt.subplot(111)

    # check if the csv already exists. if not then select the points from the centrelines
    csv_filename = DataDirectory + fname_prefix + '_terrace_info_centrelines.csv'
    if os.path.isfile(csv_filename):
        terrace_df = pd.read_csv(DataDirectory + fname_prefix +
                                 '_terrace_info_centrelines.csv')
    else:
        terrace_df = SelectTerracePointsFromCentrelines(DataDirectory,
                                                        shapefile_name,
                                                        fname_prefix,
                                                        distance=5)

    # read in the baseline channel csv, dropping nodata elevations
    lp = H.ReadMChiSegCSV(DataDirectory, fname_prefix)
    lp = lp[lp['elevation'] != -9999]

    # get the distance from outlet along the baseline for each terrace pixels
    terrace_df = terrace_df.merge(lp, left_on="BaselineNode", right_on="node")

    # make the long profile plot
    terrace_ids = terrace_df['TerraceID'].unique()

    # sort out the colours. We want a different colour for each terrace...
    # NOTE(review): the colour actually encodes terrace gradient via this
    # fixed 0-0.01 normalisation, not terrace identity — confirm intent.
    this_cmap = cm.viridis
    norm = colors.Normalize(vmin=0, vmax=0.01, clip=True)
    mapper = cm.ScalarMappable(norm=norm, cmap=this_cmap)

    terrace_gradients = []
    for id in terrace_ids:
        # mask for this ID
        this_df = terrace_df[terrace_df['TerraceID'] == id]
        this_dist = this_df['flow_distance'] / 1000  # distance in km
        this_elev = this_df['Elevation']

        # work out the number of bins. We want a spacing of ~ 10 m
        bins = np.arange(this_dist.min(), this_dist.max(), 0.05)
        n_bins = len(bins)
        if n_bins < 1:
            n_bins = 1

        # bin the data
        n, _ = np.histogram(this_dist, bins=n_bins)
        sy, _ = np.histogram(this_dist, bins=n_bins, weights=this_elev)
        # sy2 (sum of squared elevations) is computed but not used below
        sy2, _ = np.histogram(this_dist,
                              bins=n_bins,
                              weights=this_elev * this_elev)
        mean = sy / n  # mean elevation per bin

        # work out the gradient of each terrace ID...rise/run. Use this to colour the terrace.
        delta_x = this_df['flow_distance'].max(
        ) - this_df['flow_distance'].min()
        delta_z = this_elev.max() - this_elev.min()
        gradient = delta_z / delta_x
        color = mapper.to_rgba(gradient)
        terrace_gradients.append(gradient)

        # plot the terrace at the bin centres (reuses '_' = bin edges)
        ax.plot((_[1:] + _[:-1]) / 2, mean, c=color, zorder=2)

    # read the mainstem channel and join it to the baseline for flow distance
    lp_mainstem = H.read_index_channel_csv(DataDirectory, fname_prefix)
    lp_mainstem = lp_mainstem[lp_mainstem['elevation'] != -9999]
    lp_mainstem = lp_mainstem.merge(lp, left_on="id", right_on="node")

    if colour_by_ksn == True:
        # colour the mainstem points by m_chi (ksn)
        ax.scatter(lp_mainstem["flow_distance_x"] / 1000,
                   lp_mainstem["elevation_x"],
                   c=lp_mainstem["m_chi"],
                   cmap=cm.hot,
                   norm=colors.Normalize(lp_mainstem["m_chi"].min(),
                                         lp_mainstem["m_chi"].max()),
                   s=0.5,
                   lw=0.1)
    else:
        ax.plot(lp_mainstem['flow_distance_x'] / 1000,
                lp_mainstem['elevation_x'],
                'k',
                lw=1,
                label='_nolegend_')

    # if present, plot the ages on the profile
    print(ages)
    if ages:
        # read in the ages csv
        ages_df = pd.read_csv(DataDirectory + ages)
        upstream_dist = list(ages_df['upstream_dist'])
        elevation = list(ages_df['elevation'])
        ax.scatter(upstream_dist,
                   elevation,
                   s=8,
                   c="w",
                   edgecolors="k",
                   label="$^{14}$C age (cal years B.P.)")
        ax.legend(loc='upper left', fontsize=8, numpoints=1)

    # set axis params and save
    ax.set_xlabel('Flow distance (km)')
    ax.set_ylabel('Elevation (m)')
    ax.set_xlim(0, (terrace_df['flow_distance'].max() / 1000))
    ax.set_ylim(0, terrace_df['elevation'].max() + 10)

    # add a colourbar keyed to the terrace gradients
    mapper.set_array(terrace_gradients)
    cbar = plt.colorbar(mapper,
                        cmap=this_cmap,
                        norm=norm,
                        orientation='vertical')
    cbar.set_label('Gradient (m/m)')
    plt.tight_layout()
    #plt.show()
    plt.savefig(T_directory + fname_prefix + '_terrace_plot_centrelines.' +
                FigFormat,
                format=FigFormat,
                dpi=300)
def MakeTerracePlotChiSpace(DataDirectory, fname_prefix, shapefile_name, colour_by_ksn=True):
    """
    This function makes a plot of the terraces in chi-elevation space. The
    elevation of each terrace is plotted based on the chi of the nearest channel

    Args:
        DataDirectory (str): the data directory
        fname_prefix (str): prefix of the DEM
        shapefile_name (str): shapefile of terrace centrelines, used only if
            the centrelines csv does not already exist
        colour_by_ksn (bool): if True, colour the mainstem by m_chi (ksn)

    FJC 21/03/18
    """
    # check if a directory exists for the chi plots. If not then make it.
    T_directory = DataDirectory + 'terrace_plots/'
    if not os.path.isdir(T_directory):
        os.makedirs(T_directory)

    # make a figure
    fig = CreateFigure()
    ax = plt.subplot(111)

    # check if the csv already exists. if not then select the points from the centrelines
    csv_filename = DataDirectory + fname_prefix + '_terrace_info_centrelines.csv'
    if os.path.isfile(csv_filename):
        terrace_df = pd.read_csv(DataDirectory + fname_prefix +
                                 '_terrace_info_centrelines.csv')
    else:
        terrace_df = SelectTerracePointsFromCentrelines(DataDirectory,
                                                        shapefile_name,
                                                        fname_prefix,
                                                        distance=5)

    # read in the mchi csv, dropping nodata elevations
    lp = H.ReadMChiSegCSV(DataDirectory, fname_prefix)
    lp = lp[lp['elevation'] != -9999]

    # get the distance from outlet along the baseline for each terrace pixels
    terrace_df = terrace_df.merge(lp, left_on="BaselineNode", right_on="node")

    # make the long profile plot
    terrace_ids = terrace_df['TerraceID'].unique()

    # sort out the colours. We want a different colour for each terrace...
    # NOTE(review): the normalisation spans the m_chi range but the mapper is
    # fed chi-space gradients below — confirm this colour scale is intended.
    this_cmap = cm.viridis
    norm = colors.Normalize(vmin=terrace_df['m_chi'].min(),
                            vmax=terrace_df['m_chi'].max() - 10,
                            clip=True)
    mapper = cm.ScalarMappable(norm=norm, cmap=this_cmap)

    terrace_gradients = []
    for id in terrace_ids:
        # mask for this ID
        this_df = terrace_df[terrace_df['TerraceID'] == id]
        this_chi = this_df['chi']
        this_elev = this_df['Elevation']

        # work out the number of bins. We want a spacing of ~ 10 m
        bins = np.arange(this_chi.min(), this_chi.max(), 0.01)
        n_bins = len(bins)
        if n_bins < 1:
            n_bins = 1

        # bin the data
        n, _ = np.histogram(this_chi, bins=n_bins)
        sy, _ = np.histogram(this_chi, bins=n_bins, weights=this_elev)
        # sy2 (sum of squared elevations) is computed but not used below
        sy2, _ = np.histogram(this_chi,
                              bins=n_bins,
                              weights=this_elev * this_elev)
        mean = sy / n  # mean elevation per bin

        # work out the gradient of each terrace ID...rise/run. Use this to colour the terrace.
        delta_x = this_df['chi'].max() - this_df['chi'].min()
        delta_z = this_elev.max() - this_elev.min()
        gradient = delta_z / delta_x
        color = mapper.to_rgba(gradient)
        terrace_gradients.append(gradient)

        # plot the terrace at the bin centres (reuses '_' = bin edges)
        ax.plot((_[1:] + _[:-1]) / 2, mean, c=color, zorder=2)

    # read the mainstem channel and join it to the mchi data
    lp_mainstem = H.read_index_channel_csv(DataDirectory, fname_prefix)
    lp_mainstem = lp_mainstem[lp_mainstem['elevation'] != -9999]
    lp_mainstem = lp_mainstem.merge(lp, left_on="id", right_on="node")

    if colour_by_ksn == True:
        # colour the mainstem points by m_chi (ksn)
        ax.scatter(lp_mainstem["chi"],
                   lp_mainstem["elevation_y"],
                   c=lp_mainstem["m_chi"],
                   cmap=cm.viridis,
                   norm=colors.Normalize(lp_mainstem["m_chi"].min() - 10,
                                         lp_mainstem["m_chi"].max()),
                   s=0.5,
                   lw=0.1)
    else:
        ax.plot(lp_mainstem['chi'], lp_mainstem['elevation_y'], 'k', lw=1)

    # set axis params and save
    ax.set_xlabel('$\chi$')
    ax.set_ylabel('Elevation (m)')
    ax.set_xlim(0, (terrace_df['chi'].max()))
    ax.set_ylim(0, terrace_df['Elevation'].max() + 10)

    # add a colourbar keyed to the terrace gradients
    mapper.set_array(terrace_gradients)
    cbar = plt.colorbar(mapper,
                        cmap=this_cmap,
                        norm=norm,
                        orientation='vertical')
    cbar.set_label('$k_{sn}$')
    plt.tight_layout()
    #plt.show()
    plt.savefig(T_directory + fname_prefix + '_terrace_plot_chi.png',
                format='png',
                dpi=300)
def PlotSwath(swath_csv_name, FigFileName='Image.png', size_format="geomorphology", fig_format="png", dpi=500, aspect_ratio=2):
    """
    Plot a swath profile (min/max envelope plus mean line) from a swath csv.

    Args:
        swath_csv_name (str): the name of the csv file (with path!). Must
            contain 'Distance', 'Mean', 'Min' and 'Max' columns.
        FigFileName (str): output filename used unless fig_format is
            'show' or 'return'
        size_format (str): "geomorphology", "big", or anything else for the
            default ESURF width
        fig_format (str): image format; 'show' displays, 'return' returns the
            figure object, anything else is passed to savefig
        dpi (int): dots per inch for the saved figure
        aspect_ratio (float): passed to the figure sizer

    Returns:
        the matplotlib figure if fig_format == 'return', otherwise None

    Author: SMM

    Date 20/02/2018
    """
    print("STARTING swath plot.")

    # Set up fonts for plots
    label_size = 12
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size

    # make a figure at the requested width
    if size_format == "geomorphology":
        fig = plt.figure(1, facecolor='white', figsize=(6.25, 3.5))
        fig_size_inches = 6.25
        l_pad = -40
    elif size_format == "big":
        fig = plt.figure(1, facecolor='white', figsize=(16, 9))
        fig_size_inches = 16
        l_pad = -50
    else:
        fig = plt.figure(1, facecolor='white', figsize=(4.92126, 3.5))
        fig_size_inches = 4.92126
        l_pad = -35

    # Note all the below parameters are overwritten by the figure sizer routine
    gs = plt.GridSpec(100, 100, bottom=0.15, left=0.1, right=1.0, top=1.0)
    ax = fig.add_subplot(gs[25:100, 10:95])

    # pull the swath statistics out of the csv
    print("Getting data from the file: " + swath_csv_name)
    thisPointData = LSDMap_PD.LSDMap_PointData(swath_csv_name)
    distance = thisPointData.QueryData('Distance').values
    mean_val = thisPointData.QueryData('Mean').values
    min_val = thisPointData.QueryData('Min').values
    max_val = thisPointData.QueryData('Max').values

    # Get the minimum and maximum distances for the x axis ticks
    X_axis_min = 0
    X_axis_max = distance[-1]
    n_target_tics = 5
    xlocs, new_x_labels = LSDMap_BP.TickConverter(X_axis_min, X_axis_max, n_target_tics)

    # shaded min-max envelope, mean line, and black envelope edges
    ax.fill_between(distance, min_val, max_val, facecolor='orange', alpha=0.5, interpolate=True)
    ax.plot(distance, mean_val, "b", linewidth=1)
    ax.plot(distance, min_val, "k", distance, max_val, "k", linewidth=1)

    ax.spines['top'].set_linewidth(1)
    ax.spines['left'].set_linewidth(1)
    ax.spines['right'].set_linewidth(1)
    ax.spines['bottom'].set_linewidth(1)

    ax.set_ylabel("Elevation (m)")
    ax.set_xlabel("Distance along swath (km)")
    ax.set_xticks(xlocs)
    ax.set_xticklabels(new_x_labels, rotation=60)

    # This gets all the ticks, and pads them away from the axis so that the corners don't overlap
    ax.tick_params(axis='both', width=1, pad=2)
    for tick in ax.xaxis.get_major_ticks():
        tick.set_pad(2)

    # Lets try to size the figure (no colourbar on this plot)
    cbar_L = "None"
    [fig_size_inches, map_axes, cbar_axes] = Helper.MapFigureSizer(fig_size_inches, aspect_ratio, cbar_loc=cbar_L, title="None")

    fig.set_size_inches(fig_size_inches[0], fig_size_inches[1])
    ax.set_position(map_axes)

    # dispatch on the figure format: show, return the figure, or save to disk
    FigFormat = fig_format
    print("The figure format is: " + FigFormat)
    if FigFormat == 'show':
        plt.show()
    elif FigFormat == 'return':
        return fig
    else:
        plt.savefig(FigFileName, format=FigFormat, dpi=dpi)
        fig.clf()
def ExampleOne_PartTwo_PrintBasins(DataDirectory, fname_prefix):
    """
    This function makes a shaded relief plot of the DEM with the basins
    coloured by the basin ID.

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files

    Returns:
        Shaded relief plot with the basins coloured by basin ID

    Author: FJC
    """
    #import modules
    from LSDMapFigure.PlottingRaster import MapFigure

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = label_size
    size_format = "geomorphology"

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # get the basin IDs to make a discrete colourmap for each ID
    BasinInfoDF = PlotHelp.ReadBasinInfoCSV(DataDirectory, fname_prefix)
    basin_keys = list(BasinInfoDF['basin_key'])
    basin_keys = [int(x) for x in basin_keys]
    basin_junctions = list(BasinInfoDF['outlet_junction'])
    basin_junctions = [float(x) for x in basin_junctions]
    print('Basin keys are: ')
    # FIX: was a Python 2 print statement (`print basin_keys`), a syntax
    # error under Python 3; converted to the function form used elsewhere.
    print(basin_keys)

    # get a discrete colormap
    cmap = plt.cm.jet

    # going to make the basin plots - need to have bil extensions.
    print("I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!")

    # get the rasters
    raster_ext = '.bil'
    #BackgroundRasterName = fname_prefix+raster_ext
    HillshadeName = fname_prefix + '_hs' + raster_ext
    BasinsName = fname_prefix + '_AllBasins' + raster_ext
    print(BasinsName)

    # create the map figure
    MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location='bottom')

    # add the basins drape, remapping junction indices to basin keys for the colourbar
    MF.add_drape_image(BasinsName, DataDirectory, colourmap=cmap, alpha=0.8,
                       colorbarlabel='Basin ID', discrete_cmap=True,
                       n_colours=len(basin_keys), show_colourbar=True,
                       modify_raster_values=True, old_values=basin_junctions,
                       new_values=basin_keys, cbar_type=int)

    # add the basin outlines
    Basins = LSDP.GetBasinOutlines(DataDirectory, BasinsName)
    MF.plot_polygon_outlines(Basins, linewidth=0.8)

    # Save the figure
    FigFormat = "png"
    ImageName = DataDirectory + fname_prefix + '_coloured_basins.' + FigFormat
    MF.save_fig(fig_width_inches=fig_width_inches, FigFileName=ImageName,
                FigFormat=FigFormat, Fig_dpi=250)
def ExampleOne_PartFour_MaskBasins(DataDirectory, fname_prefix):
    """
    This function makes a shaded relief plot of the DEM with the basins
    coloured by the basin ID. It shows how to mask certain basins.
    Search the function for "Basins_to_mask".

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files

    Returns:
        Shaded relief plot with the basins coloured by basin ID

    Author: SMM
    """
    import numpy as np

    # basins (by basin key) that will be removed from the plot
    Basins_to_mask = [0, 4, 6]
    FigFormat = "png"
    size_format = "geomorphology"

    #import modules
    # from LSDMapFigure.PlottingRaster import MapFigure
    # from LSDMapFigure.PlottingRaster import BaseRaster
    # import LSDPlottingTools.LSDMap_VectorTools as LSDMap_VT
    # import LSDPlottingTools.LSDMap_PointTools as LSDMap_PT

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['Liberation Sans']
    rcParams['font.size'] = label_size

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # get the basin IDs to make a discrete colourmap for each ID
    BasinInfoDF = PlotHelp.ReadBasinInfoCSV(DataDirectory, fname_prefix)
    basin_keys = list(BasinInfoDF['basin_key'])
    basin_keys = [int(x) for x in basin_keys]
    basin_junctions = list(BasinInfoDF['outlet_junction'])
    basin_junctions = [float(x) for x in basin_junctions]

    # get the junctions to mask: map each masked basin key to its outlet junction
    key_to_index_dict = dict(zip(basin_keys, basin_junctions))
    junctions_to_mask = []
    for basin in Basins_to_mask:
        junctions_to_mask.append(key_to_index_dict[basin])
    print("The junctions to mask are")
    print(junctions_to_mask)

    print('Basin keys are: ')
    # FIX: was a Python 2 print statement (`print basin_keys`), a syntax
    # error under Python 3; converted to the function form used elsewhere.
    print(basin_keys)

    # replace masked keys with NaN so they are not coloured in the drape
    print("Let me mask those for you")
    new_keys = []
    for key in basin_keys:
        if key in Basins_to_mask:
            new_keys.append(np.nan)
        else:
            new_keys.append(key)
    print("The new keys are: ")
    print(new_keys)
    basin_keys = new_keys

    # get a discrete colormap
    cmap = plt.cm.jet

    # going to make the basin plots - need to have bil extensions.
    print("I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!")

    # get the rasters
    raster_ext = '.bil'
    #BackgroundRasterName = fname_prefix+raster_ext
    HillshadeName = fname_prefix + '_hs' + raster_ext
    BasinsName = fname_prefix + '_AllBasins' + raster_ext
    print(BasinsName)

    # create the map figure
    # We set colourbar location to none since we are labelling the figures
    MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location='none')

    # add the basins drape
    MF.add_drape_image(BasinsName, DataDirectory, colourmap=cmap, alpha=0.8,
                       colorbarlabel='Basin ID', discrete_cmap=True,
                       n_colours=len(basin_keys), show_colourbar=True,
                       modify_raster_values=True, old_values=basin_junctions,
                       new_values=basin_keys, cbar_type=int)

    # add the basin outlines
    Basins = LSDP.GetBasinOutlines(DataDirectory, BasinsName)

    # get rid of the basins that are being masked
    for junction in junctions_to_mask:
        del Basins[junction]

    # note that at this stage the Basins are keyed with the junction index
    MF.plot_polygon_outlines(Basins, linewidth=0.8)

    # add the basin labelling
    # this dict has the basin junction as the key and the basin_key as the value
    label_dict = dict(zip(basin_junctions, basin_keys))
    Points = LSDP.GetPointWithinBasins(DataDirectory, BasinsName)

    # get rid of points as well
    for junction in junctions_to_mask:
        del Points[junction]
        del label_dict[junction]

    MF.add_text_annotation_from_shapely_points(Points, text_colour='k', label_dict=label_dict)

    # Save the figure
    ImageName = DataDirectory + fname_prefix + '_labelled_basins.' + FigFormat
    MF.save_fig(fig_width_inches=fig_width_inches, FigFileName=ImageName,
                FigFormat=FigFormat, Fig_dpi=250)
def concavityCatcher(full_path, write_name, processed=False, basins_not_glaciated=None, alter_ID=True, disorder=True):
    """
    Return basin keys and their median concavities for an analysis directory.

    Args:
        full_path (str): path to the directory holding the MC points csv
        write_name (str): file prefix (a '/' is prepended internally)
        processed (bool): if True, filter results to basins_not_glaciated
        basins_not_glaciated (list): basin keys to keep when processed=True
        alter_ID (bool): passed to getDisorderConcavity; when True that routine
            also returns remapped basin IDs
        disorder (bool): if True (default) use the disorder-based concavity
            from getDisorderConcavity; otherwise use the median m/n from the
            MC points analysis.
            FIX: `disorder` was previously referenced but never defined,
            so every call raised NameError; it is now a parameter.

    Returns:
        (basin_keys, concavities, new_IDs) when not processed — new_IDs is
        None unless disorder and alter_ID are both True; or
        (basin_list, concavity_list) when processed.
    """
    # avoid the shared-mutable-default pitfall (was `basins_not_glaciated=[]`)
    if basins_not_glaciated is None:
        basins_not_glaciated = []

    #returns the basin_key and median concavity
    write_name = '/' + write_name

    #reading in the basin info
    BasinDF = Helper.ReadMCPointsCSV(full_path, write_name)

    #Getting mn data
    PointsDF = MN.GetMOverNRangeMCPoints(BasinDF, start_movern=0.1, d_movern=0.05, n_movern=18)

    #extract basin key and concavity as list
    basin_series = PointsDF["basin_key"]
    concavity_series = PointsDF["Median_MOverNs"]

    # FIX: new_IDs was unbound on the non-disorder and non-alter_ID paths,
    # which made the `return basin_keys, concavities, new_IDs` below crash.
    new_IDs = None
    if not disorder:
        basin_keys = [int(x) for x in basin_series.tolist()]
        concavities = concavity_series.tolist()
    else:
        print("got to disorder")
        print(full_path, write_name)
        if not alter_ID:
            basin_keys, concavities = getDisorderConcavity(full_path, write_name, fromConcavityCatcher=True, alter_ID=False)
        else:
            basin_keys, concavities, new_IDs = getDisorderConcavity(full_path, write_name, fromConcavityCatcher=True)

    if not processed:
        print(concavities)
        return basin_keys, concavities, new_IDs

    # processed: keep only the basins that were not glaciated
    print("got to processed")
    try:
        processedDF = PointsDF[PointsDF.basin_key.isin(basins_not_glaciated)]
    except Exception as e:
        print(e)
        print("error in processed section of concavity catcher")
        # FIX: the original swallowed the exception and then crashed on the
        # unbound processedDF; re-raise so the real error surfaces.
        raise
    processed_basin = processedDF["basin_key"]
    processed_concavity = processedDF["Median_MOverNs"]
    basin_list = processed_basin.tolist()
    concavity_list = processed_concavity.tolist()
    return basin_list, concavity_list
def main(argv):
    """
    Command-line driver: plots a chi map with basin outlines, fault lines and
    highlighted relay basins for a single DEM.

    Flags: -dir (base directory), -fname (DEM prefix, required),
    -fmt (figure format, default png).
    """
    # If there are no arguments, send to the welcome screen
    if not len(sys.argv) > 1:
        full_paramfile = print_welcome()
        sys.exit()

    # Get the arguments
    import argparse
    parser = argparse.ArgumentParser()

    # The location of the data files
    parser.add_argument("-dir", "--base_directory", type=str,
                        help="The base directory. If not defined, current directory.")
    parser.add_argument("-fname", "--fname_prefix", type=str,
                        help="The prefix of your DEM WITHOUT EXTENSION!")
    parser.add_argument("-fmt", "--FigFormat", type=str, default='png',
                        help="Set the figure format for the plots. Default is png")
    args = parser.parse_args()

    # get the base directory
    if args.base_directory:
        DataDirectory = args.base_directory
        # check if you remembered a / at the end of your path_name
        if not DataDirectory.endswith("/"):
            print("You forgot the '/' at the end of the directory, appending...")
            # FIX: was `DataDirectory = this_dir + "/"` — this_dir was undefined
            # here, so the append crashed with NameError.
            DataDirectory = DataDirectory + "/"
    else:
        # FIX: was `this_dir = os.getcwd()`, which left DataDirectory unset
        # and crashed every use below.
        DataDirectory = os.getcwd() + "/"

    if not args.fname_prefix:
        print("WARNING! You haven't supplied your DEM name. Please specify this with the flag '-fname'")
        sys.exit()
    else:
        fname_prefix = args.fname_prefix

    # set to not parallel
    parallel = False
    faults = True
    FigFormat = args.FigFormat

    # check if a directory exists for the chi plots. If not then make it.
    raster_directory = DataDirectory + 'raster_plots/'
    if not os.path.isdir(raster_directory):
        os.makedirs(raster_directory)

    # Set up fonts for plots
    label_size = 8
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size

    # set figure sizes based on format (size_format is hard-wired empty, so
    # this always falls through to the ESURF width)
    size_format = ""
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # get the basin IDs to make a discrete colourmap for each ID
    BasinInfoDF = Helper.ReadBasinInfoCSV(DataDirectory, fname_prefix)
    basin_keys = list(BasinInfoDF['basin_key'])
    basin_keys = [int(x) for x in basin_keys]
    basin_junctions = list(BasinInfoDF['outlet_junction'])
    basin_junctions = [int(x) for x in basin_junctions]

    # get a discrete colormap
    cmap = plt.cm.viridis

    # going to make the basin plots - need to have bil extensions.
    print("I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!")

    # get the rasters
    raster_ext = '.bil'
    BackgroundRasterName = fname_prefix + raster_ext
    HillshadeName = fname_prefix + '_hs' + raster_ext
    BasinsName = fname_prefix + '_AllBasins' + raster_ext

    # create the map figure
    MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location='none')

    # add the basins drape
    BasinsDict = dict(zip(basin_keys, basin_keys))
    # MF.add_basin_plot(BasinsName,fname_prefix,DataDirectory, label_basins=False,
    #                   use_keys_not_junctions = True, show_colourbar = False,
    #                   value_dict = BasinsDict, discrete_cmap=True, n_colours=len(basin_keys),
    #                   colorbarlabel = "Basin ID", cbar_type=int, tickspacefactor=2,
    #                   colourmap = cmap, edgecolour='none', adjust_text = True, parallel=parallel)

    # add the channel network
    if not parallel:
        ChannelDF = Helper.ReadChiDataMapCSV(DataDirectory, fname_prefix)
    else:
        ChannelDF = Helper.AppendChiDataMapCSVs(DataDirectory)

    # remove chi no data values
    ChannelDF = ChannelDF[ChannelDF.chi != -9999]
    ChannelPoints = LSDP.LSDMap_PointData(ChannelDF, data_type="pandas", PANDEX=True)

    # add chi map
    MF.add_point_data(ChannelPoints, column_for_plotting="chi",
                      column_for_scaling="chi", colorbarlabel="$\chi$ (m)",
                      show_colourbar=True, this_colourmap=cmap,
                      colourbar_location="top")

    # add the faults
    if faults:
        LineFileName = DataDirectory + fname_prefix + "_faults.shp"
        MF.add_line_data(LineFileName, linestyle="-", linewidth=1.5, zorder=99,
                         legend=True, label="Fault Segments")

    # add the basin outlines ### need to parallelise
    if not parallel:
        Basins = LSDP.GetBasinOutlines(DataDirectory, BasinsName)
    else:
        Basins = LSDP.GetMultipleBasinOutlines(DataDirectory)

    # Find the relay basins and plot separately
    RelayBasinIDs = [1248, 4788, 4995, 5185, 6187, 6758, 6805]
    RelayBasins = {key: value for key, value in Basins.items() if key in RelayBasinIDs}

    # Plot all basins, then the relay basins on top in red
    MF.plot_polygon_outlines(Basins, colour='k', linewidth=0.5, alpha=1,
                             legend=True, label="Catchments")
    MF.plot_polygon_outlines(RelayBasins, colour='r', linewidth=0.5, alpha=1,
                             legend=True, label="Relay Catchments")

    # Add the legend
    MF.add_legend()

    # Save the figure
    ImageName = raster_directory + fname_prefix + '_chi_map.' + FigFormat
    MF.save_fig(fig_width_inches=fig_width_inches, FigFileName=ImageName,
                FigFormat=FigFormat, Fig_dpi=300)
def PrintBasins_Complex(DataDirectory, fname_prefix, use_keys_not_junctions=True, show_colourbar=False, Remove_Basins=None, Rename_Basins=None, Value_dict=None, cmap="jet", colorbarlabel="colourbar", size_format="ESURF", fig_format="png", dpi=250, out_fname_prefix="", include_channels=False, label_basins=True):
    """
    This function makes a shaded relief plot of the DEM with the basins
    coloured by the basin ID. This allows more complex plotting with renamed
    and excluded basins.

    Args:
        DataDirectory (str): the data directory with the m/n csv files
        fname_prefix (str): The prefix for the m/n csv files
        use_keys_not_junctions (bool): If true use basin keys to locate basins, otherwise use junction indices
        show_colourbar (bool): if true show the colourbar
        Remove_Basins (list): key or junction indices of basins you want to remove from plotting
        Rename_Basins (dict): key is either basin key or junction index, value is a new name for the basin
        Value_dict (dict): key is either basin key or junction index, value is a value used to colour the basins
        cmap (str or colourmap): The colourmap to use for the plot
        size_format (str): Either geomorphology or big. Anything else gets you a 4.9 inch wide figure (standard ESURF size)
        fig_format (str): An image format. png, pdf, eps, svg all valid
        dpi (int): The dots per inch of the figure
        out_fname_prefix (str): The prefix of the image file. If blank uses the fname_prefix
        include_channels (bool): If true, adds a channel plot from the chi_data_maps file
        label_basins (bool): If true, the basins get labels

    Returns:
        Shaded relief plot with the basins coloured by basin ID.

    Author: FJC, SMM
    """
    #import modules
    from LSDMapFigure.PlottingRaster import MapFigure

    # FIX: the defaults were mutable ([] and {}), which Python shares between
    # calls; use None sentinels instead. Behaviour is otherwise identical.
    if Remove_Basins is None:
        Remove_Basins = []
    if Rename_Basins is None:
        Rename_Basins = {}
    if Value_dict is None:
        Value_dict = {}

    # set figure sizes based on format
    if size_format == "geomorphology":
        fig_width_inches = 6.25
    elif size_format == "big":
        fig_width_inches = 16
    else:
        fig_width_inches = 4.92126

    # get the basin IDs to make a discrete colourmap for each ID
    BasinInfoDF = PlotHelp.ReadBasinInfoCSV(DataDirectory, fname_prefix)
    basin_keys = list(BasinInfoDF['basin_key'])
    basin_keys = [int(x) for x in basin_keys]
    basin_junctions = list(BasinInfoDF['outlet_junction'])
    basin_junctions = [float(x) for x in basin_junctions]
    print('Basin keys are: ')
    print(basin_keys)

    # going to make the basin plots - need to have bil extensions.
    print("I'm going to make the basin plots. Your topographic data must be in ENVI bil format or I'll break!!")

    # get the rasters
    raster_ext = '.bil'
    #BackgroundRasterName = fname_prefix+raster_ext
    HillshadeName = fname_prefix + '_hs' + raster_ext
    BasinsName = fname_prefix + '_AllBasins' + raster_ext

    # This initiates the figure
    MF = MapFigure(HillshadeName, DataDirectory, coord_type="UTM_km", colourbar_location="None")

    # This adds the basins
    MF.add_basin_plot(BasinsName, fname_prefix, DataDirectory,
                      mask_list=Remove_Basins, rename_dict=Rename_Basins,
                      value_dict=Value_dict,
                      use_keys_not_junctions=use_keys_not_junctions,
                      show_colourbar=show_colourbar, discrete_cmap=True,
                      n_colours=15, colorbarlabel=colorbarlabel,
                      colourmap=cmap, adjust_text=False,
                      label_basins=label_basins)

    # See if you need the channels
    if include_channels:
        print("I am going to add some channels for you")
        ChannelFileName = fname_prefix + "_chi_data_map.csv"
        chi_csv_fname = DataDirectory + ChannelFileName
        thisPointData = LSDMap_PD.LSDMap_PointData(chi_csv_fname)
        MF.add_point_data(thisPointData, column_for_plotting="basin_key",
                          scale_points=True,
                          column_for_scaling="drainage_area",
                          this_colourmap="Blues_r", scaled_data_in_log=True,
                          max_point_size=3, min_point_size=1,
                          discrete_colours=True, NColours=1, zorder=5)

    # Save the image
    if len(out_fname_prefix) == 0:
        ImageName = DataDirectory + fname_prefix + "_selected_basins." + fig_format
    else:
        ImageName = DataDirectory + out_fname_prefix + "_selected_basins." + fig_format

    MF.save_fig(fig_width_inches=fig_width_inches, FigFileName=ImageName,
                FigFormat=fig_format, Fig_dpi=dpi, transparent=True)
def MakeTerraceHeatMap(DataDirectory, fname_prefix, mchi_fname, prec=100, bw_method=0.03, FigFormat='png', ages=""):
    """
    Function to make a heat map of the terrace pixels using Gaussian KDE.
    see https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.stats.gaussian_kde.html
    for more details.

    Args:
        DataDirectory(str): the data directory
        fname_prefix(str): prefix of your DEM
        mchi_fname(str): prefix of the mchi (segmented chi) csv
        prec(int): the resolution for the KDE. Increase this to get a finer resolution, decrease for coarser.
        bw_method: the method for determining the bandwidth of the KDE. Can either be
            "scott", "silverman" (automatic), or a scalar. Default = 0.03
        FigFormat(str): figure format, default = png
        ages (str): name of a csv file with terrace ages to plot on the profile.
            Must be in the same directory

    FJC 26/03/18
    """
    import scipy.stats as st

    # check if a directory exists for the terrace plots. If not then make it.
    T_directory = DataDirectory + 'terrace_plots/'
    if not os.path.isdir(T_directory):
        os.makedirs(T_directory)

    # make a figure
    fig = CreateFigure()
    ax = plt.subplot(111)

    # read in the terrace DataFrame and drop nodata baseline nodes
    terrace_df = H.read_terrace_csv(DataDirectory, fname_prefix)
    terrace_df = terrace_df[terrace_df['BaselineNode'] != -9999]

    # read in the mchi csv
    lp = H.ReadMChiSegCSV(DataDirectory, mchi_fname)
    lp = lp[lp['elevation'] != -9999]

    # get the distance from outlet along the baseline for each terrace pixel
    terrace_df = terrace_df.merge(lp, left_on="BaselineNode", right_on="node")
    flow_dist = terrace_df['flow_distance'] / 1000
    print(terrace_df)

    ## Getting the extent of our dataset
    xmin = 0
    xmax = flow_dist.max()
    ymin = 0
    ymax = terrace_df["Elevation"].max()

    ## formatting the data in a meshgrid
    X, Y = np.meshgrid(np.linspace(0, xmax, num=prec),
                       np.linspace(0, ymax, num=prec))
    positions = np.vstack([X.ravel(), Y.ravel()[::-1]
                           ])  # inverted Y to get the axis in the bottom left
    values = np.vstack([flow_dist, terrace_df['Elevation']])

    # FIX: the original guard was `len(values) == 0`, but np.vstack of two
    # rows always has len 2, so the "no terraces" branch was unreachable and
    # gaussian_kde crashed on empty data. Check the number of samples instead.
    if values.shape[1] == 0:
        print("You don't have any terraces, I'm going to quit now.")
    else:
        # get the kernel density estimation
        KDE = st.gaussian_kde(values, bw_method=bw_method)
        Z = np.reshape(KDE(positions).T, X.shape)

        # plot the density on the profile
        cmap = cm.gist_heat_r
        cmap.set_bad(alpha=0)
        cb = ax.imshow(Z, interpolation="None",
                       extent=[xmin, xmax, ymin, ymax], cmap=cmap,
                       aspect="auto")

        # plot the main stem channel
        lp_mainstem = H.read_index_channel_csv(DataDirectory, fname_prefix)
        lp_mainstem = lp_mainstem[lp_mainstem['elevation'] != -9999]
        lp_mainstem = lp_mainstem.merge(lp, left_on="id", right_on="node")
        lp_flow_dist = lp_mainstem['flow_distance_y'] / 1000
        ax.plot(lp_flow_dist, lp_mainstem['elevation_y'], 'k', lw=1,
                label='_nolegend_')

        # if present, plot the ages on the profile
        if ages:
            # read in the ages csv
            ages_df = pd.read_csv(DataDirectory + ages)
            upstream_dist = list(ages_df['upstream_dist'])
            elevation = list(ages_df['elevation'])
            ax.scatter(upstream_dist, elevation, s=8, c="w", edgecolors="k",
                       label="$^{14}$C age (cal years B.P.)")
            ax.legend(loc='upper left', fontsize=8, numpoints=1)

        # set some plot lims
        ax.set_xlim(xmin, xmax)
        ax.set_ylim(ymin, ymax)
        ax.set_xlabel('Flow distance (km)')
        ax.set_ylabel('Elevation (m)')

        # add a colourbar
        cbar = plt.colorbar(cb, cmap=cmap, orientation='vertical')
        cbar.set_label('Density')

        # save the figure
        plt.tight_layout()
        plt.savefig(T_directory + fname_prefix + '_terrace_plot_heat_map.png',
                    format=FigFormat, dpi=300)
        plt.clf()
def main(argv):
    """
    Command-line driver for the m/n (concavity) sensitivity analysis plots.
    Walks every 'sigma_*' sub-directory of the base directory and makes the
    requested plots in each, then collates the sensitivity results.
    """
    # If there are no arguments, send to the welcome screen
    if not len(sys.argv) > 1:
        full_paramfile = print_welcome()
        sys.exit()

    # Get the arguments
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-dir", "--base_directory", type=str, help="The base directory with the MLE analyses. If this isn't defined I'll assume it's the same as the current directory.")
    parser.add_argument("-fname", "--fname_prefix", type=str, help="The prefix of your DEM WITHOUT EXTENSION!!! This must be supplied or you will get an error.")

    # What sort of analyses you want
    # NOTE: `type=bool` on argparse flags is a known pitfall — bool("False")
    # is True, so any non-empty value enables the flag. Kept for interface
    # compatibility.
    parser.add_argument("-PR", "--plot_rasters", type=bool, default=False, help="If this is true, I'll make raster plots of the m/n value and basin keys")
    parser.add_argument("-chi", "--plot_basic_chi", type=bool, default=False, help="If this is true I'll make basin chi plots for each basin coloured by elevation.")
    parser.add_argument("-PC", "--plot_chi_profiles", type=bool, default=False, help="If this is true, I'll make chi-elevation plots for each basin coloured by the MLE")
    parser.add_argument("-K", "--plot_chi_by_K", type=bool, default=False, help="If this is true, I'll make chi-elevation plots for each basin coloured by K. NOTE - you MUST have a column in your chi csv with the K value or this will break!")
    parser.add_argument("-pcbl", "--plot_chi_by_lith", type=bool, default=False, help="If this is true, I'll make chi-elevation plots for each basin coloured by litho. NOTE - you MUST have a column in your chi csv with the K value or this will break!")
    parser.add_argument("-PO", "--plot_outliers", type=bool, default=False, help="If this is true, I'll make chi-elevation plots with the outliers removed")
    parser.add_argument("-MLE", "--plot_MLE_movern", type=bool, default=False, help="If this is true, I'll make a plot of the MLE values for each m/n showing how the MLE values change as you remove the tributaries")
    parser.add_argument("-SA", "--plot_SA_data", type=bool, default=False, help="If this is true, I'll make a plot of the MLE values for each m/n showing how the MLE values change as you remove the tributaries")
    parser.add_argument("-MCMC", "--plot_MCMC", type=bool, default=False, help="If this is true, I'll make a plot of the MCMC analysis. Specify which basins you want with the -basin_keys flag.")
    parser.add_argument("-pts", "--point_uncertainty", type=bool, default=False, help="If this is true, I'll make a plot of the range in m/n from the MC points analysis")
    parser.add_argument("-hist", "--plot_histogram", type=bool, default=False, help="If this is true, I'll make plots of the pdfs of m/n values for each method.")
    # parser.add_argument("-basin_joyplot", "--basin_joyplot", type=bool, default=False, help="If this is true, I'll make a joyplot showing m/n for each basin from the chi points")
    parser.add_argument("-SUM", "--plot_summary", type=bool, default=False, help="If this is true, I'll make the summary CSV file and plot of the best fit m/n from each of the methods.")
    parser.add_argument("-ALL", "--all_movern_estimates", type=bool, default=False, help="If this is true, I'll make all the plots")

    # Plotting options
    parser.add_argument("-points", "--point_analysis", type=bool, default=False, help="If this is true then I'll assume that you're running the MLE analysis using the point method. Default = False")
    parser.add_argument("-show_SA_raw", "--show_SA_raw", type=bool, default=True, help="Show the raw S-A data in background of SA plot. Default = True")
    parser.add_argument("-show_SA_segments", "--show_SA_segments", type=bool, default=False, help="Show the segmented S-A data in SA plot. Default = False")
    parser.add_argument("-test_SA_regression", "--test_SA_regression", type=bool, default=False, help="If this is true I'll print the regression stats for the slope area plots.")
    parser.add_argument("-show_legend", "--show_legend", type=bool, default=True, help="If this is true, I'll display the legend for the SA plots.")
    parser.add_argument("-basin_keys", "--basin_keys", type=str, default="", help="This is a comma delimited string that gets the list of basins you want for the plotting. Default = no basins")

    # These control the format of your figures
    parser.add_argument("-fmt", "--FigFormat", type=str, default='png', help="Set the figure format for the plots. Default is png")
    parser.add_argument("-size", "--size_format", type=str, default='ESURF', help="Set the size format for the figure. Can be 'big' (16 inches wide), 'geomorphology' (6.25 inches wide), or 'ESURF' (4.92 inches wide) (defualt esurf).")
    parser.add_argument("-animate", "--animate", type=bool, default=True, help="If this is true I will create an animation of the chi plots. Must be used with the -PC flag set to True.")
    parser.add_argument("-keep_pngs", "--keep_pngs", type=bool, default=False, help="If this is true I will delete the png files when I animate the figures. Must be used with the -animate flag set to True.")
    args = parser.parse_args()

    if not args.fname_prefix:
        print("WARNING! You haven't supplied your DEM name. Please specify this with the flag '-fname'")
        sys.exit()

    # get the base directory
    if args.base_directory:
        Directory = args.base_directory
    else:
        Directory = os.getcwd()

    # FIX: `simple_format` was used throughout this function but never
    # defined, raising NameError as soon as any plotting flag was set.
    # Use the user's chosen figure format.
    simple_format = args.FigFormat

    # check the basins
    print("You told me that the basin keys are: ")
    print(args.basin_keys)

    if len(args.basin_keys) == 0:
        print("No basins found, I will plot all of them")
        these_basin_keys = []
    else:
        these_basin_keys = [int(item) for item in args.basin_keys.split(',')]
        print("The basins I will plot are:")
        print(these_basin_keys)

    # get the range of moverns, needed for plotting
    BasinDF = Helper.ReadBasinStatsCSV(Directory + "/sigma_10/", args.fname_prefix)

    # we need the column headers
    columns = BasinDF.columns[BasinDF.columns.str.contains('m_over_n')].tolist()
    moverns = [float(x.split("=")[-1]) for x in columns]
    start_movern = moverns[0]
    n_movern = len(moverns)
    d_movern = (moverns[-1] - moverns[0]) / (n_movern - 1)

    # loop through each sub-directory with the sensitivity results
    MLE_str = "sigma_"
    for subdir, dirs, files in os.walk(Directory):
        for dir in dirs:
            if MLE_str in dir:
                this_dir = Directory + "/" + dir + '/'

                # make the plots depending on your choices
                if args.plot_rasters:
                    MN.MakeRasterPlotsBasins(this_dir, args.fname_prefix, args.size_format, simple_format)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, size_format=args.size_format, FigFormat=simple_format)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, movern_method="Chi_points", size_format=args.size_format, FigFormat=simple_format)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, movern_method="SA", size_format=args.size_format, FigFormat=simple_format)

                if args.plot_basic_chi:
                    MN.MakePlotsWithMLEStats(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)

                if args.plot_chi_profiles:
                    MN.MakeChiPlotsMLE(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, size_format=args.size_format, FigFormat=simple_format, animate=args.animate, keep_pngs=args.keep_pngs)

                if args.plot_chi_by_K:
                    MN.MakeChiPlotsColouredByK(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, size_format=args.size_format, FigFormat=simple_format, animate=args.animate, keep_pngs=args.keep_pngs)

                if args.plot_chi_by_lith:
                    MN.MakeChiPlotsColouredByLith(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, size_format=args.size_format, FigFormat=simple_format, animate=args.animate, keep_pngs=args.keep_pngs)

                if args.plot_outliers:
                    MN.PlotProfilesRemovingOutliers(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)

                if args.plot_MLE_movern:
                    MN.PlotMLEWithMOverN(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, size_format=args.size_format, FigFormat=simple_format)

                if args.plot_SA_data:
                    SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=simple_format, size_format=args.size_format, show_raw=args.show_SA_raw, show_segments=args.show_SA_segments, basin_keys=these_basin_keys)

                if args.test_SA_regression:
                    #SA.TestSARegression(this_dir, args.fname_prefix)
                    SA.LinearRegressionRawDataByChannel(this_dir, args.fname_prefix, basin_list=these_basin_keys)
                    #SA.LinearRegressionSegmentedData(this_dir, args.fname_prefix, basin_list=these_basin_keys)

                if args.plot_MCMC:
                    MN.plot_MCMC_analysis(this_dir, args.fname_prefix, basin_list=these_basin_keys, FigFormat=simple_format, size_format=args.size_format)

                if args.point_uncertainty:
                    MN.PlotMCPointsUncertainty(this_dir, args.fname_prefix, basin_list=these_basin_keys, FigFormat=simple_format, size_format=args.size_format, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)

                if args.plot_histogram:
                    MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=simple_format, size_format=args.size_format, show_legend=args.show_legend)

                if args.plot_summary:
                    MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)
                    MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=simple_format, size_format=args.size_format, show_legend=args.show_legend)
                    MN.MakeMOverNPlotOneMethod(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=args.FigFormat, size_format=args.size_format)

                # if args.basin_joyplot:
                #     MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)
                #     MN.MakeBasinJoyplot(this_dir, args.fname_prefix, basin_list=these_basin_keys, FigFormat=simple_format, size_format=args.size_format)

                if args.all_movern_estimates:
                    # plot the rasters
                    MN.MakeRasterPlotsBasins(this_dir, args.fname_prefix, args.size_format, args.FigFormat)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, movern_method="Chi_full", size_format=args.size_format, FigFormat=args.FigFormat)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, movern_method="Chi_points", size_format=args.size_format, FigFormat=args.FigFormat)
                    MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern, d_movern, movern_method="SA", size_format=args.size_format, FigFormat=args.FigFormat)

                    # make the chi plots
                    MN.MakeChiPlotsMLE(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, size_format=args.size_format, FigFormat=args.FigFormat, animate=True, keep_pngs=True)

                    # make the SA plots
                    SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=args.FigFormat, size_format=args.size_format, show_raw=args.show_SA_raw, show_segments=True, basin_keys=these_basin_keys)
                    SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=args.FigFormat, size_format=args.size_format, show_raw=args.show_SA_raw, show_segments=False, basin_keys=these_basin_keys)

                    #summary plots
                    MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern)
                    MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=args.FigFormat, size_format=args.size_format, show_legend=args.show_legend)

                    #joyplot
                    MN.MakeMOverNPlotOneMethod(this_dir, args.fname_prefix, basin_list=these_basin_keys, start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=args.FigFormat, size_format=args.size_format)
                    #MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix,basin_list=these_basin_keys,start_movern=start_movern, d_movern=d_movern, n_movern=n_movern, FigFormat=args.FigFormat, size_format=args.size_format, show_legend=args.show_legend)

    # collate all the results to get the final figure
    MN.PlotSensitivityResultsSigma(Directory, args.fname_prefix, FigFormat=args.FigFormat, size_format=args.size_format, movern_method='points')
def MakeTerraceHeatMapNormalised(DataDirectory, fname_prefix, mchi_fname, prec=100, bw_method=0.03, FigFormat='png', ages=""):
    """
    Function to make a heat map of the terrace pixels using Gaussian KDE.
    Pixels are normalised based on elevation of closest channel pixel.
    see https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.stats.gaussian_kde.html
    for more details.

    Args:
        DataDirectory(str): the data directory
        fname_prefix(str): prefix of your DEM
        mchi_fname(str): if you specified a junction then this will be different from the junction fname
        prec(int): the resolution for the KDE. Increase this to get a finer resolution, decrease for coarser.
        bw_method: the method for determining the bandwidth of the KDE.  This is apparently quite sensitive to this.
            Can either be "scott", "silverman" (where the bandwidth will be determined automatically), or a scalar. Default = 0.03
        FigFormat(str): figure format, default = png
        ages (str): Can pass in the name of a csv file with terrace ages which will be plotted on the profile.
            Must be in the same directory.  NOTE(review): currently unused in this function — confirm whether
            age plotting was ever wired in.

    FJC 26/03/18
    """
    import scipy.stats as st

    # check if a directory exists for the terrace plots. If not then make it.
    T_directory = DataDirectory + 'terrace_plots/'
    if not os.path.isdir(T_directory):
        os.makedirs(T_directory)

    # make a figure
    fig = CreateFigure()
    ax = plt.subplot(111)
    #ax1 = plt.subplot(212)

    # read in the terrace DataFrame and drop pixels with no baseline channel node
    terrace_df = H.read_terrace_csv(DataDirectory, fname_prefix)
    terrace_df = terrace_df[terrace_df['BaselineNode'] != -9999]

    # read in the mchi csv and drop nodata elevations
    lp = H.ReadMChiSegCSV(DataDirectory, mchi_fname)
    lp = lp[lp['elevation'] != -9999]

    # get the distance from outlet along the baseline for each terrace pixel
    # by joining each terrace pixel to its nearest channel node
    terrace_df = terrace_df.merge(lp, left_on="BaselineNode", right_on="node")
    flow_dist = terrace_df['flow_distance'] / 1000  # m -> km for the x axis

    ## Getting the extent of our dataset
    xmin = 0
    xmax = flow_dist.max()
    ymin = 0
    ymax = terrace_df["ChannelRelief"].max()

    ## formatting the data in a meshgrid
    X, Y = np.meshgrid(np.linspace(0, xmax, num=prec),
                       np.linspace(0, ymax, num=prec))
    # inverted Y to get the axis in the bottom left
    positions = np.vstack([X.ravel(), Y.ravel()[::-1]])
    values = np.vstack([flow_dist, terrace_df["ChannelRelief"]])

    # evaluate the kernel density estimate on the grid
    KDE = st.gaussian_kde(values, bw_method=bw_method)
    Z = np.reshape(KDE(positions).T, X.shape)
    #Z = np.ma.masked_where(Z < 0.00000000001, Z)

    # try a 2d hist
    # h, fd_bins, elev_bins = np.histogram2d(flow_dist, terrace_df['Elevation'], bins=500)
    # h = h.T
    # h = np.ma.masked_where(h == 0, h)
    # X,Y = np.meshgrid(fd_bins, elev_bins)
    # ax.pcolormesh(X,Y,h, cmap="seismic")
    #
    cmap = cm.gist_heat_r
    cmap.set_bad(alpha=0)  # masked cells render transparent
    #norm=colors.LogNorm(vmin=0, vmax=Z.max(),cmap=cmap)

    cb = ax.imshow(Z, interpolation="None",
                   extent=[xmin, xmax, ymin, ymax],
                   cmap=cmap, aspect="auto")
    #ax.pcolormesh(X,Y,Z, cmap="seismic")

    # set some plot lims
    ax.set_xlim(xmin, xmax)
    ax.set_ylim(ymin, ymax)
    ax.set_xlabel('Flow distance (km)')
    ax.set_ylabel('Elevation above channel (m)')

    # add a colourbar
    cbar = plt.colorbar(cb, cmap=cmap, orientation='vertical')
    cbar.set_label('Density')

    plt.tight_layout()
    # BUG FIX: the file extension used to be hard-coded to '.png' even though the
    # figure bytes were written in FigFormat — build the extension from FigFormat.
    plt.savefig(T_directory + fname_prefix + '_terrace_plot_heat_map_norm.' + FigFormat,
                format=FigFormat, dpi=300)
    plt.clf()
def main(argv):
    """Command-line driver for concavity (m/n) analysis plotting.

    Parses the command-line flags, works out which basins to analyse, reads
    the basin statistics CSVs (optionally appending parallel-run outputs),
    derives the m/n sweep parameters (start, step, count) from the CSV column
    headers, and then dispatches to the requested MN/SA plotting routines.

    Args:
        argv: the raw argument vector (only printed for the log; parsing is
            done via argparse on sys.argv).
    """
    # print("On some windows systems you need to set an environment variable GDAL_DATA")
    # print("If the code crashes here it means the environment variable is not set")
    # print("Let me check gdal enviroment for you. Currently is is:")
    # print(os.environ['GDAL_DATA'])
    #os.environ['GDAL_DATA'] = os.popen('gdal-config --datadir').read().rstrip()
    #print("Now I am going to get the updated version:")
    #print(os.environ['GDAL_DATA'])

    # If there are no arguments, send to the welcome screen
    if not len(sys.argv) > 1:
        full_paramfile = print_welcome()
        sys.exit()

    # Get the arguments
    import argparse
    parser = argparse.ArgumentParser()

    # --- The location of the data files ---
    parser.add_argument(
        "-dir", "--base_directory", type=str,
        help="The base directory that contains your data files. If this isn't defined I'll assume it's the same as the current directory.")
    parser.add_argument(
        "-fname", "--fname_prefix", type=str,
        help="The prefix of your DEM WITHOUT EXTENSION!!! This must be supplied or you will get an error (unless you're running the parallel plotting).")

    # --- What sort of analyses you want ---
    # NOTE(review): type=bool on argparse does NOT parse "False" as False — any
    # non-empty string is truthy. The -no_legend flag below exists as a workaround
    # for exactly this; confirm before relying on "-flag False" on the CLI.
    parser.add_argument(
        "-PR", "--plot_rasters", type=bool, default=False,
        help="If this is true, I'll make raster plots of the m/n value and basin keys")
    parser.add_argument(
        "-chi", "--plot_basic_chi", type=bool, default=False,
        help="If this is true I'll make basin chi plots for each basin coloured by elevation.")
    parser.add_argument(
        "-PC", "--plot_chi_profiles", type=bool, default=False,
        help="If this is true, I'll make chi-elevation plots for each basin coloured by the MLE")
    parser.add_argument(
        "-K", "--plot_chi_by_K", type=bool, default=False,
        help="If this is true, I'll make chi-elevation plots for each basin coloured by K. NOTE - you MUST have a column in your chi csv with the K value or this will break!")
    parser.add_argument(
        "-pcbl", "--plot_chi_by_lith", type=bool, default=False,
        help="If this is true, I'll make chi-elevation plots for each basin coloured by litho. NOTE - you MUST have a column in your chi csv with the K value or this will break!")
    parser.add_argument(
        "-PO", "--plot_outliers", type=bool, default=False,
        help="If this is true, I'll make chi-elevation plots with the outliers removed")
    parser.add_argument(
        "-MLE", "--plot_MLE_movern", type=bool, default=False,
        help="If this is true, I'll make a plot of the MLE values for each m/n showing how the MLE values change as you remove the tributaries")
    parser.add_argument(
        "-SA", "--plot_SA_data", type=bool, default=False,
        help="If this is true, I'll make a plot of the MLE values for each m/n showing how the MLE values change as you remove the tributaries")
    parser.add_argument(
        "-MCMC", "--plot_MCMC", type=bool, default=False,
        help="If this is true, I'll make a plot of the MCMC analysis. Specify which basins you want with the -basin_keys flag.")
    parser.add_argument(
        "-pts", "--point_uncertainty", type=bool, default=False,
        help="If this is true, I'll make a plot of the range in m/n from the MC points analysis")
    parser.add_argument(
        "-hist", "--plot_histogram", type=bool, default=False,
        help="If this is true, I'll make plots of the pdfs of m/n values for each method.")
    parser.add_argument(
        "-disorder", "--plot_disorder", type=bool, default=False,
        help="If this is true, I'll make plots of the chi disorder analysis.")
    parser.add_argument(
        "-SUM", "--plot_summary", type=bool, default=False,
        help="If this is true, I'll make the summary CSV file and plot of the best fit concavity from each of the methods.")
    parser.add_argument(
        "-ALL", "--all_movern_estimates", type=bool, default=False,
        help="If this is true, I'll make all the plots")
    parser.add_argument(
        "-DisFxnDist", "--disorder_function_of_distance", type=bool, default=False,
        help="If this is true, I'll make a plot of the disorder metric as a function of position")

    # --- Plotting options ---
    parser.add_argument(
        "-points", "--point_analysis", type=bool, default=False,
        help="If this is true then I'll assume that you're running the MLE analysis using the point method. Default = False")
    parser.add_argument(
        "-show_SA_raw", "--show_SA_raw", type=bool, default=True,
        help="Show the raw S-A data in background of SA plot. Default = True")
    parser.add_argument(
        "-show_SA_segments", "--show_SA_segments", type=bool, default=False,
        help="Show the segmented S-A data in SA plot. Default = False")
    parser.add_argument(
        "-test_SA_regression", "--test_SA_regression", type=bool, default=False,
        help="If this is true I'll print the regression stats for the slope area plots.")
    parser.add_argument(
        "-show_legend", "--show_legend", type=bool, default=True,
        help="If this is true, I'll display the legend for plots.")
    parser.add_argument(
        "-no_legend", "--no_legend", dest="show_legend", action="store_false",
        help="Flag to not display legends, I'll not display the legend for plots, default is for legend to be displayed. Note taht setting show_legend False does not achieve this due to bool issues with python parsing")

    # --- Options about basin selection ---
    parser.add_argument(
        "-basin_keys", "--basin_keys", type=str, default="",
        help="This is a comma delimited string that gets the list of basins you want for the plotting. Default = no basins")
    parser.add_argument(
        "-basin_lists", "--basin_lists", type=str, default="",
        help="This is a string that initiates a list of a list for grouping basins. The object becomes a list of a list but the syntax is comma seperated lists, and each one is separated by a colon. Default = no dict")
    parser.add_argument(
        "-group_names", "--group_names", type=str, default="",
        help="Names of the groups provided by basin_lists. Used in legends")

    # --- These control the format of your figures ---
    parser.add_argument(
        "-fmt", "--FigFormat", type=str, default='png',
        help="Set the figure format for the plots. Default is png")
    parser.add_argument(
        "-size", "--size_format", type=str, default='ESURF',
        help="Set the size format for the figure. Can be 'big' (16 inches wide), 'geomorphology' (6.25 inches wide), or 'ESURF' (4.92 inches wide) (defualt esurf).")
    parser.add_argument(
        "-animate", "--animate", type=bool, default=True,
        help="If this is true I will create an animation of the chi plots. Must be used with the -PC flag set to True.")
    parser.add_argument(
        "-keep_pngs", "--keep_pngs", type=bool, default=False,
        help="If this is true I will delete the png files when I animate the figures. Must be used with the -animate flag set to True.")
    parser.add_argument(
        "-parallel", "--parallel", type=bool, default=False,
        help="If this is true I'll assume you ran the code in parallel and append all your CSVs together before plotting.")

    args = parser.parse_args()

    print(argv)
    print(args)

    # A DEM prefix is mandatory unless running in parallel mode (where the
    # prefix is inferred from the directory name instead).
    if not args.fname_prefix:
        if not args.parallel:
            print("WARNING! You haven't supplied your DEM name. Please specify this with the flag '-fname'")
            sys.exit()

    # get the base directory
    if args.base_directory:
        this_dir = args.base_directory
        # check if you remembered a / at the end of your path_name
        if not this_dir.endswith("/"):
            print("You forgot the '/' at the end of the directory, appending...")
            this_dir = this_dir + "/"
    else:
        this_dir = os.getcwd()

    # check the basins
    print("You told me that the basin keys are: ")
    print(args.basin_keys)

    # See if a basin info file exists and if so get the basin list
    print("Let me check if there is a basins info csv file.")
    BasinInfoPrefix = args.fname_prefix + "_AllBasinsInfo.csv"
    BasinInfoFileName = this_dir + BasinInfoPrefix
    existing_basin_keys = []
    if os.path.isfile(BasinInfoFileName):
        print("There is a basins info csv file")
        BasinInfoDF = Helper.ReadBasinInfoCSV(this_dir, args.fname_prefix)
        existing_basin_keys = list(BasinInfoDF['basin_key'])
        existing_basin_keys = [int(x) for x in existing_basin_keys]
    else:
        print("I didn't find a basins info csv file. Check directory or filename.")

    # Parse any lists, dicts, or list of lists from the arguments
    these_basin_keys = parse_list_from_string(args.basin_keys)
    basin_stack_list = parse_list_of_list_from_string(args.basin_lists)
    basin_stack_names = parse_string_list_from_string(args.group_names)

    # If the basin keys are not supplied then assume all basins are used.
    if these_basin_keys == []:
        these_basin_keys = existing_basin_keys

    # Python is so amazing. Look at the line below.
    # Basins in the info file but NOT selected get masked out of raster plots.
    Mask_basin_keys = [i for i in existing_basin_keys if i not in these_basin_keys]

    print("All basins are: ")
    print(existing_basin_keys)
    print("The basins to keep are:")
    print(these_basin_keys)
    print("The basins to mask are:")
    print(Mask_basin_keys)

    # This is an old version. It passes empty strings to the plotting functions.
    #if len(args.basin_keys) == 0:
    #    print("No basins found, I will plot all of them")
    #    # Note that if you pass an empty list to the plotting functions, they will plot all the basins
    #    these_basin_keys = []
    #else:
    #    these_basin_keys = [int(item) for item in args.basin_keys.split(',')]
    #    print("The basins I will plot are:")
    #    print(these_basin_keys)

    # This checks to see if chi points method is being used.
    # If not, assumes only the disorder metric has been calculated
    Using_disorder_metric_only = check_if_disorder_metric_only(this_dir, args.fname_prefix)

    if not args.parallel:
        BasinDF = Helper.ReadBasinStatsCSV(this_dir, args.fname_prefix)
    else:
        BasinDF = Helper.AppendBasinCSVs(this_dir, args.fname_prefix)

        # if parallel, get the fname from the data directory. This assumes that your directory is called
        # something sensible that relates to the DEM name.
        split_fname = this_dir.split("/")
        split_fname = split_fname[len(split_fname) - 2]

    # get the range of moverns, needed for plotting
    # we need the column headers: columns named like "...m_over_n=0.45" encode the sweep
    columns = BasinDF.columns[BasinDF.columns.str.contains('m_over_n')].tolist()
    moverns = [float(x.split("=")[-1]) for x in columns]
    start_movern = moverns[0]
    n_movern = len(moverns)
    # Decimal + round(…, 2) avoids float noise in the step size (e.g. 0.04999...)
    x = Decimal((moverns[-1] - moverns[0]) / (n_movern - 1))
    d_movern = round(x, 2)
    print('Start movern, n_movern, d_movern: ')
    print(start_movern, n_movern, d_movern)

    # some formatting for the figures
    if args.FigFormat == "manuscipt_svg":
        print("You chose the manuscript svg option. This only works with the -ALL flag. For other flags it will default to simple svg")
        simple_format = "svg"
    elif args.FigFormat == "manuscript_png":
        print("You chose the manuscript png option. This only works with the -ALL flag. For other flags it will default to simple png")
        simple_format = "png"
    else:
        simple_format = args.FigFormat

    # --- make the plots depending on your choices ---
    if args.plot_rasters:
        MN.MakeRasterPlotsBasins(this_dir, args.fname_prefix, args.size_format,
                                 simple_format, parallel=args.parallel)
    if args.plot_basic_chi:
        MN.MakePlotsWithMLEStats(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, parallel=args.parallel)
    if args.plot_chi_profiles:
        # colour by chi when only the disorder metric exists, otherwise by MLE
        if Using_disorder_metric_only:
            MN.MakeChiPlotsChi(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                               start_movern=start_movern, d_movern=d_movern,
                               n_movern=n_movern, size_format=args.size_format,
                               FigFormat=args.FigFormat, animate=True, keep_pngs=True,
                               parallel=args.parallel)
        else:
            MN.MakeChiPlotsMLE(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                               start_movern=start_movern, d_movern=d_movern,
                               n_movern=n_movern, size_format=args.size_format,
                               FigFormat=args.FigFormat, animate=True, keep_pngs=True,
                               parallel=args.parallel)
    if args.plot_chi_by_K:
        MN.MakeChiPlotsColouredByK(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                   start_movern=start_movern, d_movern=d_movern,
                                   n_movern=n_movern, size_format=args.size_format,
                                   FigFormat=simple_format, animate=args.animate,
                                   keep_pngs=args.keep_pngs, parallel=args.parallel)
    if args.plot_chi_by_lith:
        MN.MakeChiPlotsColouredByLith(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                      start_movern=start_movern, d_movern=d_movern,
                                      n_movern=n_movern, size_format=args.size_format,
                                      FigFormat=simple_format, animate=args.animate,
                                      keep_pngs=args.keep_pngs, parallel=args.parallel)
    if args.plot_outliers:
        MN.PlotProfilesRemovingOutliers(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                        start_movern=start_movern, d_movern=d_movern,
                                        n_movern=n_movern, parallel=args.parallel)
    if args.plot_MLE_movern:
        MN.PlotMLEWithMOverN(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                             start_movern=start_movern, d_movern=d_movern,
                             n_movern=n_movern, size_format=args.size_format,
                             FigFormat=simple_format, parallel=args.parallel)
    if args.plot_SA_data:
        SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=simple_format,
                        size_format=args.size_format, show_raw=args.show_SA_raw,
                        show_segments=args.show_SA_segments,
                        basin_keys=these_basin_keys, parallel=args.parallel)
    if args.test_SA_regression:
        #SA.TestSARegression(this_dir, args.fname_prefix)
        # NOTE(review): this passes parallel= — confirm SA.LinearRegressionRawDataByChannel
        # accepts a `parallel` keyword argument.
        SA.LinearRegressionRawDataByChannel(this_dir, args.fname_prefix,
                                            basin_list=these_basin_keys,
                                            parallel=args.parallel)
        #SA.LinearRegressionSegmentedData(this_dir, args.fname_prefix, basin_list=these_basin_keys)
    if args.plot_MCMC:
        MN.plot_MCMC_analysis(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                              FigFormat=simple_format, size_format=args.size_format,
                              parallel=args.parallel)
    if args.point_uncertainty:
        MN.PlotMCPointsUncertainty(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                   FigFormat=simple_format, size_format=args.size_format,
                                   start_movern=start_movern, d_movern=d_movern,
                                   n_movern=n_movern, parallel=args.parallel)
    if args.plot_histogram:
        MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                      start_movern=start_movern, d_movern=d_movern,
                                      n_movern=n_movern, FigFormat=simple_format,
                                      size_format=args.size_format,
                                      show_legend=args.show_legend, Chi_disorder=True)
    if args.plot_summary:
        # This function creates a csv that has the concavity statistics in it
        MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                            start_movern=start_movern, d_movern=d_movern,
                                            n_movern=n_movern, parallel=args.parallel,
                                            Chi_disorder=True)
        MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, FigFormat=simple_format,
                                 size_format=args.size_format, show_legend=args.show_legend,
                                 parallel=args.parallel, Chi_disorder=True)
        # This only prints the summary plots for bootstrap and disorder metrics
        MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, FigFormat=simple_format,
                                 size_format=args.size_format, show_legend=args.show_legend,
                                 parallel=args.parallel, Chi_all=False, SA_raw=False,
                                 SA_segmented=False, SA_channels=False,
                                 Chi_bootstrap=True, Chi_disorder=True)
        MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                      start_movern=start_movern, d_movern=d_movern,
                                      n_movern=n_movern, FigFormat=args.FigFormat,
                                      size_format=args.size_format,
                                      show_legend=args.show_legend, Chi_disorder=True)
    if args.plot_disorder:
        MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern,
                                 d_movern, movern_method="Chi_disorder",
                                 size_format=args.size_format, FigFormat=args.FigFormat,
                                 parallel=args.parallel)
        # This function creates a csv that has the concavity statistics in it
        MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                            start_movern=start_movern, d_movern=d_movern,
                                            n_movern=n_movern, parallel=args.parallel,
                                            Chi_disorder=True)
        MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, FigFormat=simple_format,
                                 size_format=args.size_format, show_legend=args.show_legend,
                                 parallel=args.parallel, Chi_disorder=True)
        MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                      start_movern=start_movern, d_movern=d_movern,
                                      n_movern=n_movern, FigFormat=args.FigFormat,
                                      size_format=args.size_format,
                                      show_legend=args.show_legend, Chi_disorder=True)
    if args.all_movern_estimates:
        print("I am going to print out loads and loads of figures for you.")
        # plot the rasters
        MN.MakeRasterPlotsBasins(this_dir, args.fname_prefix, args.size_format,
                                 args.FigFormat, parallel=args.parallel)
        # the full-chi, points and SA rasters only exist when more than the
        # disorder metric was calculated
        if not Using_disorder_metric_only:
            MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern,
                                     d_movern, movern_method="Chi_full",
                                     size_format=args.size_format, FigFormat=args.FigFormat,
                                     parallel=args.parallel)
            MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern,
                                     d_movern, movern_method="Chi_points",
                                     size_format=args.size_format, FigFormat=args.FigFormat,
                                     parallel=args.parallel)
            MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern,
                                     d_movern, movern_method="SA",
                                     size_format=args.size_format, FigFormat=args.FigFormat,
                                     parallel=args.parallel)
        MN.MakeRasterPlotsMOverN(this_dir, args.fname_prefix, start_movern, n_movern,
                                 d_movern, movern_method="Chi_disorder",
                                 size_format=args.size_format, FigFormat=args.FigFormat,
                                 parallel=args.parallel)

        # make the chi plots
        if Using_disorder_metric_only:
            MN.MakeChiPlotsChi(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                               start_movern=start_movern, d_movern=d_movern,
                               n_movern=n_movern, size_format=args.size_format,
                               FigFormat=args.FigFormat, animate=True, keep_pngs=True,
                               parallel=args.parallel)
        else:
            MN.MakeChiPlotsMLE(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                               start_movern=start_movern, d_movern=d_movern,
                               n_movern=n_movern, size_format=args.size_format,
                               FigFormat=args.FigFormat, animate=True, keep_pngs=True,
                               parallel=args.parallel)

        # make the SA plots (segmented and unsegmented versions)
        SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=args.FigFormat,
                        size_format=args.size_format, show_raw=args.show_SA_raw,
                        show_segments=True, basin_keys=these_basin_keys,
                        parallel=args.parallel)
        SA.SAPlotDriver(this_dir, args.fname_prefix, FigFormat=args.FigFormat,
                        size_format=args.size_format, show_raw=args.show_SA_raw,
                        show_segments=False, basin_keys=these_basin_keys,
                        parallel=args.parallel)

        #summary plots
        # This function creates a csv that has the concavity statistics in it
        MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                            start_movern=start_movern, d_movern=d_movern,
                                            n_movern=n_movern, parallel=args.parallel,
                                            Chi_disorder=True)
        MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, FigFormat=simple_format,
                                 size_format=args.size_format, show_legend=args.show_legend,
                                 parallel=args.parallel, Chi_disorder=True)
        # This only prints the summary plots for bootstrap and disorder metrics
        MN.MakeMOverNSummaryPlot(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                 start_movern=start_movern, d_movern=d_movern,
                                 n_movern=n_movern, FigFormat=simple_format,
                                 size_format=args.size_format, show_legend=args.show_legend,
                                 parallel=args.parallel, Chi_all=False, SA_raw=False,
                                 SA_segmented=False, SA_channels=False,
                                 Chi_bootstrap=True, Chi_disorder=True)
        MN.MakeMOverNSummaryHistogram(this_dir, args.fname_prefix, basin_list=these_basin_keys,
                                      start_movern=start_movern, d_movern=d_movern,
                                      n_movern=n_movern, FigFormat=args.FigFormat,
                                      size_format=args.size_format,
                                      show_legend=args.show_legend, Chi_disorder=True)
    if args.disorder_function_of_distance:
        # This function creates a csv that has the concavity statistics in it
        print("=====================================================")
        print("=====================================================")
        print("\n\n\n\nI am going to get the summary information.")

        # See if the summary already exists; only recompute it if missing
        print("Let me check if there is a concavity summary csv file.")
        SummaryPrefix = args.fname_prefix + "_movern_summary.csv"
        SummaryFileName = this_dir + "summary_plots/" + SummaryPrefix
        print("The summary filename is: " + SummaryFileName)
        if os.path.isfile(SummaryFileName):
            print("There is already a summary file")
        else:
            print("No summray csv found. I will calculate a new one.")
            MN.CompareMOverNEstimatesAllMethods(this_dir, args.fname_prefix,
                                                basin_list=these_basin_keys,
                                                start_movern=start_movern,
                                                d_movern=d_movern, n_movern=n_movern,
                                                parallel=args.parallel, Chi_disorder=True)

        # Okay, now we plot the metrics as a function of distance
        print("I am going to print the following lists of basins: ")
        print(basin_stack_list)
        MN.MakeMOverNDisorderDistancePlot(this_dir, args.fname_prefix,
                                          basin_list_list=basin_stack_list,
                                          start_movern=start_movern, d_movern=d_movern,
                                          n_movern=n_movern, FigFormat=simple_format,
                                          size_format=args.size_format,
                                          show_legend=args.show_legend,
                                          parallel=args.parallel,
                                          group_names=basin_stack_names)
def LinearRegressionRawDataByChannel(DataDirectory, DEM_prefix, basin_list=[], parallel=False):
    """
    This function performs a linear regression on the raw slope-area data
    separated by channel. It returns a dataframe with the linear regression
    info for each basin key.

    Args:
        DataDirectory (str): the data directory
        DEM_prefix (str): the prefix of the DEM_prefix
        basin_list: a list of the basins to analyse, default = empty (all basins)
        parallel (bool): if true, append the raw SA data from a parallel run
            before analysing (new, backward-compatible; matches
            LinearRegressionRawData and the call made from main())

    Returns:
        pandas dataframe with the linear regression info, one row per
        (basin_key, source_key) pair

    Author: SMM and FJC
    """
    # read in raw data (appending parallel-run CSVs if requested, mirroring
    # LinearRegressionRawData)
    if not parallel:
        df = Helper.ReadRawSAData(DataDirectory, DEM_prefix)
    else:
        df = Helper.AppendRawSAData(DataDirectory, DEM_prefix)

    # get a list of the basins if needed
    if basin_list == []:
        print(
            "You didn't give me a basin list so I will analyse all the basins")
        basin_list = df['basin_key'].unique()
    print(basin_list)

    # now do a linear regression for each channel in each basin
    columns = [
        'basin_key', 'source_key', 'regression_slope', 'std_err', 'R2',
        'p_value'
    ]
    OutDF = pd.DataFrame(columns=columns)

    counter = 0
    for basin_key in basin_list:
        this_df = df[df['basin_key'] == basin_key]
        # get the sources for this basin
        these_sources = this_df['source_key'].unique()
        for source in these_sources:
            # BUG FIX: mask within this basin's dataframe (this_df), not the
            # whole dataset — the old code re-masked df by source_key alone,
            # which mixes basins if a source key is not globally unique.
            source_df = this_df[this_df['source_key'] == source]
            df_slope = source_df['slope'].values
            df_area = source_df['drainage_area'].values

            # log-log regression; drop zero-area nodes (log10 undefined)
            logS = np.log10(df_slope[df_area != 0])
            logA = np.log10(df_area[df_area != 0])

            # linregress needs at least two points; skip degenerate channels
            if len(logA) < 2:
                continue

            slope, intercept, r_value, p_value, std_err = stats.linregress(
                logA, logS)
            #print("Slope: " +str(slope)+ " std_err: "+str(std_err)+ " R2 is: " + str(r_value**2) + " p value is: " + str(p_value))

            this_row = [
                int(basin_key),
                int(source), slope, std_err, r_value**2, p_value
            ]
            OutDF.loc[counter] = this_row
            counter += 1

    return OutDF
def SAPlotDriver(DataDirectory, DEM_prefix, FigFormat='show', size_format="ESURF", show_raw=True, show_segments=True, cmap=plt.cm.Set1, n_colours=10, basin_keys=[], parallel=False):
    """
    This is a driver function that manages plotting of Slope-Area data

    Args:
        DataDirectory (str): the path to the directory with the csv file
        DEM_prefix (str): name of your DEM without extension
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'.
            If "show" then it calls the matplotlib show() command.
        size_format (str): Can be "big" (16 inches wide), "geomorphology"
            (6.25 inches wide), or "ESURF" (4.92 inches wide) (defualt esurf).
        show_raw (bool): If true show raw data in background
        show_segments (bool): If true, show the segmented main stem,
        cmap (string or colourmap): the colourmap use to colour tributaries
        n_colours (int): The number of coulours used in plotting tributaries
        basin_keys (list): A list of the basin keys to plot. If empty, plot
            all the basins.
        parallel (bool): If true the data is in multiple files and must be
            merged

    Returns:
        Slope-area plot for each basin

    Author: SMM
    """
    from LSDPlottingTools import LSDMap_PointTools as PointTools
    print("These basin keys are: ")
    print(basin_keys)

    # read in binned data
    binned_csv_fname = DataDirectory + DEM_prefix + '_SAbinned.csv'
    print("I'm reading in the csv file " + binned_csv_fname)
    if not parallel:
        binnedPointData = PointTools.LSDMap_PointData(binned_csv_fname)
    else:
        # NOTE(review): the Append call's return value is immediately
        # overwritten — presumably AppendSABinnedCSVs writes the merged csv
        # to disk and we then re-read it; confirm in Helper.
        binnedPointData = Helper.AppendSABinnedCSVs(DataDirectory, DEM_prefix)
        binnedPointData = PointTools.LSDMap_PointData(binned_csv_fname)

    # Read in the raw data (only loaded if it will actually be shown)
    if (show_raw):
        print("I am going to show the raw data.")
        all_csv_fname = DataDirectory + DEM_prefix + '_SAvertical.csv'
        if not parallel:
            allPointData = PointTools.LSDMap_PointData(all_csv_fname)
        else:
            # same append-then-reload pattern as for the binned data
            allPointData = Helper.AppendSAVerticalCSVs(DataDirectory,
                                                       DEM_prefix)
            allPointData = PointTools.LSDMap_PointData(all_csv_fname)

    # Read in the segmented data (only loaded if it will actually be shown)
    if (show_segments):
        print("I am going to show segments on the main stem.")
        segmented_csv_fname = DataDirectory + DEM_prefix + '_SAsegmented.csv'
        if not parallel:
            segmentedPointData = PointTools.LSDMap_PointData(
                segmented_csv_fname)
        else:
            # same append-then-reload pattern as for the binned data
            segmentedPointData = Helper.AppendSASegmentedCSVs(
                DataDirectory, DEM_prefix)
            segmentedPointData = PointTools.LSDMap_PointData(
                segmented_csv_fname)

    # get the basin keys and check if the basins in the basin list exist
    basin = binnedPointData.QueryData('basin_key')
    basin = [int(x) for x in basin]
    Basin = np.asarray(basin)
    these_basin_keys = np.unique(Basin)
    print("The unique basin keys are: ")
    print(these_basin_keys)

    final_basin_keys = []
    # A bit of logic for checking keys: empty request means "all basins",
    # otherwise keep only requested keys that actually exist in the data
    if (len(basin_keys) == 0):
        final_basin_keys = these_basin_keys
    else:
        for basin in basin_keys:
            if basin not in these_basin_keys:
                print("You were looking for basin " + str(basin) +
                      " but it isn't in the basin keys.")
            else:
                final_basin_keys.append(basin)

    print("The final basin keys are:")
    print(final_basin_keys)

    this_cmap = cmap
    # NOTE(review): missing space before "basins" in this message is
    # pre-existing; left unchanged since this edit only adds comments.
    print("There are " + str(len(final_basin_keys)) +
          "basins that I will plot")

    #basin_keys.append(0)
    # Loop through the basin keys, making a plot for each one.
    # The four branches are: segmented+raw, segmented only, binned+raw,
    # and neither (which produces nothing).
    for basin_key in final_basin_keys:
        print("I am making a plot for basin: " + str(basin_key))

        if (show_segments):
            if (show_raw):
                FileName = DEM_prefix + '_SA_plot_raw_and_segmented_basin%s.%s' % (
                    str(basin_key), FigFormat)
                SegmentedWithRawSlopeAreaPlot(segmentedPointData,
                                              allPointData,
                                              DataDirectory,
                                              FigFileName=FileName,
                                              FigFormat=FigFormat,
                                              size_format=size_format,
                                              basin_key=basin_key,
                                              cmap=this_cmap,
                                              n_colours=n_colours)
            else:
                FileName = DEM_prefix + '_SA_plot_segmented_basin%s.%s' % (
                    str(basin_key), FigFormat)
                SegmentedSlopeAreaPlot(segmentedPointData,
                                       DataDirectory,
                                       FigFileName=FileName,
                                       FigFormat=FigFormat,
                                       size_format=size_format,
                                       basin_key=basin_key)
        else:
            if (show_raw):
                FileName = DEM_prefix + '_SA_plot_raw_and_binned_basin%s.%s' % (
                    str(basin_key), FigFormat)
                BinnedWithRawSlopeAreaPlot(DataDirectory,
                                           DEM_prefix,
                                           FigFileName=FileName,
                                           FigFormat=FigFormat,
                                           size_format=size_format,
                                           basin_key=basin_key,
                                           n_colours=n_colours,
                                           cmap=this_cmap)
            else:
                print(
                    "You selected an option that doesn't produce any plots. Turn either show raw or show segments to True."
                )
def BinnedWithRawSlopeAreaPlot(DataDirectory, fname_prefix, FigFileName='Image.pdf',
                               FigFormat='show', size_format="ESURF", basin_key='0',
                               cmap=plt.cm.Set1, n_colours=5):
    """
    This function makes a slope-area plot from the chi mapping tool using the
    binned data. It plots all the sources and all the raw data as a
    semitransparent background.

    Args:
        DataDirectory (str): the data directory
        fname_prefix (str): the prefix of your DEM
        FigFileName (str): The name of the figure file
        FigFormat (str): The format of the figure. Usually 'png' or 'pdf'.
            If "show" then it calls the matplotlib show() command. If "return"
            then the figure object is returned instead of saved.
        size_format (str): Can be "big" (16 inches wide), "geomorphology"
            (6.25 inches wide), or "ESURF" (4.92 inches wide) (default ESURF).
        basin_key (int): the ID of the basin to make the plot for.
        cmap (string or colormap object): The colourmap
        n_colours (int): The number of colours to cycle through for the sources

    Returns:
        Does not return anything but makes a plot, unless FigFormat == 'return'
        in which case the figure object is returned.

    Author: SMM (panda-fied by FJC)
    """
    import matplotlib.colors as colors

    # check if a directory exists for the SA plots. If not then make it.
    SA_directory = DataDirectory + 'SA_plots/'
    if not os.path.isdir(SA_directory):
        os.makedirs(SA_directory)

    # Set up fonts for plots
    label_size = 10
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size

    # make a figure; the size depends on the requested output format
    if size_format == "geomorphology":
        fig = plt.figure(1, facecolor='white', figsize=(6.25, 3.5))
    elif size_format == "big":
        fig = plt.figure(1, facecolor='white', figsize=(16, 9))
    else:
        fig = plt.figure(1, facecolor='white', figsize=(4.92126, 3.5))

    gs = plt.GridSpec(100, 100, bottom=0.15, left=0.1, right=1.0, top=1.0)
    ax = fig.add_subplot(gs[25:100, 10:95])

    # Get the raw data and mask to the basin
    RawDF = Helper.ReadRawSAData(DataDirectory, fname_prefix)
    RawDF = RawDF[RawDF['basin_key'] == basin_key]

    # get the binned data and mask to the basin
    BinnedDF = Helper.ReadBinnedSAData(DataDirectory, fname_prefix)
    BinnedDF = BinnedDF[BinnedDF['basin_key'] == basin_key]

    # make a color map of fixed colors: normalise the colour index onto
    # [0, n_colours-1] so source colours cycle through the colourmap
    this_cmap = cmap
    cNorm = colors.Normalize(vmin=0, vmax=n_colours - 1)
    # The scalar map converts values in the dataset to RGBA colours:
    # rgba_color = scalarMap.to_rgba(this_data)
    scalarMap = plt.cm.ScalarMappable(norm=cNorm, cmap=this_cmap)

    # now get the sources
    # NOTE: .values replaces DataFrame.as_matrix(), which was deprecated
    # in pandas 0.23 and removed in pandas 1.0
    sources = np.unique(RawDF['source_key'].values)
    print(sources)

    # get the regression stats for this basin
    RegressionDF = LinearRegressionRawData(DataDirectory, fname_prefix)
    # NOTE: .loc replaces DataFrame.get_value(), removed in pandas 1.0;
    # LinearRegressionRawData indexes OutDF rows by basin_key
    this_movern = round(RegressionDF.loc[basin_key, 'regression_slope'], 2)

    # Mask the data of the segments sequentially
    for idx, source in enumerate(sources):
        print("Source key: " + str(source))

        # mask to just get the data for the source of interest
        RawDFMask = RawDF[RawDF['source_key'] == source]
        BinnedDFMask = BinnedDF[BinnedDF['source_key'] == source]

        # the binned medians are stored in log10 space, so convert back
        SourceMedianLogSlope = 10**(BinnedDFMask['median_log_S'].values)
        SourceMedianLogArea = 10**(BinnedDFMask['median_log_A'].values)
        print(SourceMedianLogArea)

        # Now add the colours for the segments, cycling through n_colours
        source_colour = idx % n_colours
        tps_color = scalarMap.to_rgba(source_colour)

        ax.scatter(SourceMedianLogArea, SourceMedianLogSlope, c=tps_color,
                   s=10, marker="o", lw=0.5, edgecolors='k', zorder=100)

        # get the errors: the quartiles are also stored in log10 space
        first_quartile = 10**(BinnedDFMask['logS_FirstQuartile'].values)
        third_quartile = 10**(BinnedDFMask['logS_ThirdQuartile'].values)
        yerr_up = third_quartile - SourceMedianLogSlope
        yerr_down = SourceMedianLogSlope - first_quartile
        plt.errorbar(SourceMedianLogArea, SourceMedianLogSlope,
                     yerr=[yerr_down, yerr_up], fmt='o', ms=1,
                     ecolor=tps_color, zorder=0)

        # Plot the raw data as a semitransparent background
        SourceRawMedianS = RawDFMask['slope']
        SourceRawMedianA = RawDFMask['drainage_area']
        ax.scatter(SourceRawMedianA, SourceRawMedianS, c=tps_color, s=4,
                   marker="+", lw=0.5, edgecolors='k', zorder=-10, alpha=0.3)

    ax.set_xlabel('Drainage area (m$^2$)')
    ax.set_ylabel('Slope (m/m)')

    # slope-area plots are conventionally log-log
    ax.set_xscale('log')
    ax.set_yscale('log')

    ax.set_title('Basin ' + str(basin_key) + ', best fit ' + r'$\theta$' +
                 ' = ' + str(this_movern), fontsize=10)

    # return or show the figure
    print("The figure format is: " + FigFormat)
    if FigFormat == 'show':
        plt.show()
    elif FigFormat == 'return':
        # return the figure object so the caller can compose subplots
        return fig
    else:
        plt.savefig(SA_directory + FigFileName, format=FigFormat, dpi=500)
        fig.clf()