def set_pow2format(ax, locs):
    formatter = FuncFormatter(pow2formatter)
    ax.xaxis.set_major_formatter(formatter)
    ax.xaxis.set_minor_formatter(NullFormatter())
    ax.set_xticks(locs)
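# Note: pow2formatter is referenced above but not defined in this snippet.
# A minimal sketch of what such a FuncFormatter callback might look like
# (hypothetical, assuming the tick locations passed in locs are powers of two):
import numpy as np

def pow2formatter(x, pos):
    """Label a tick value x as a power of two, e.g. 8 -> 2^3."""
    if x <= 0:
        return ''
    return r'$2^{%d}$' % int(round(np.log2(x)))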
# (fragment) the first line below continues a plt.plot(...) call from earlier context
             linestyle='--', label=labs[count])
    count += 1
plt.xlabel('Iteration')
plt.ylabel('Train loss')
plt.xscale('log')
plt.grid()

ax = fig.add_subplot(224)
plt.plot(SGDfc['0.1']['HD Multiplicative']['val_loss'], color='black',
         label=r'SGDM HD $\alpha_0=0.02$')
count = 0
for i in list(SGDfc.keys()):
    plt.plot(SGDfc[i]['HD']['val_loss'], color=cols[count],
             linestyle='-', label=labsHD[count])
    plt.plot(SGDfc[i]['Keras']['val_loss'], color=lcols[count],
             linestyle='--', label=labs[count])
    count += 1
plt.yscale('log')
plt.ylabel('Validation loss')
plt.xlabel('Epoch')
plt.grid()
plt.gca().yaxis.set_minor_formatter(NullFormatter())
plt.show()
import os
import logging

import numpy as np
import netCDF4
import salem
from matplotlib import colors
from matplotlib.ticker import NullFormatter
from oggm.utils import entity_task
import cleo

# Local imports
import oggm.cfg as cfg

# Module logger
log = logging.getLogger(__name__)

nullfmt = NullFormatter()  # no labels


def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=256):
    """Remove extreme colors from colormap."""
    new_cmap = colors.LinearSegmentedColormap.from_list(
        'trunc({n},{a:.2f},{b:.2f})'.format(n=cmap.name, a=minval, b=maxval),
        cmap(np.linspace(minval, maxval, n)))
    return new_cmap


def _plot_map(plotfunc):
    """ Decorator for common Cleo.Map plotting logic """

    commondoc = """
def scatter_hist(stats_list_x, stats_list_y, labels=None, nbins=200, s=5,
                 prct_bounds=[0.1, 99.9]):
    if labels is None:
        labels = [None] * len(stats_list_x)

    x_lb = np.percentile(np.hstack(stats_list_x), prct_bounds[0])
    x_hb = np.percentile(np.hstack(stats_list_x), prct_bounds[1])
    x_bins = np.linspace(x_lb, x_hb, nbins)

    y_lb = np.percentile(np.hstack(stats_list_y), prct_bounds[0])
    y_hb = np.percentile(np.hstack(stats_list_y), prct_bounds[1])
    y_bins = np.linspace(y_lb, y_hb, nbins)

    ###
    # Parts C&P'd from https://matplotlib.org/examples/pylab_examples/scatter_hist.html
    ###
    nullfmt = NullFormatter()  # no labels

    # definitions for the axes
    left, width = 0.1, 0.65
    bottom, height = 0.1, 0.65
    bottom_h = left_h = left + width + 0.02

    rect_scatter = [left, bottom, width, height]
    rect_histx = [left, bottom_h, width, 0.2]
    rect_histy = [left_h, bottom, 0.2, height]

    # start with a rectangular Figure
    plt.figure(1, figsize=(8, 8))

    axHistx = plt.axes(rect_histx)
    axHisty = plt.axes(rect_histy)

    # no labels
    axHistx.xaxis.set_major_formatter(nullfmt)
    axHisty.yaxis.set_major_formatter(nullfmt)

    axScatter = plt.axes(rect_scatter)

    # the scatter plot:
    alpha = 1 / len(stats_list_x)
    colors = ["c", "m", "c", "y"]
    for x, y, color, label in zip(stats_list_x, stats_list_y, colors, labels):
        axHistx.hist(x, bins=x_bins, color=color, alpha=alpha, density=True)
        axHisty.hist(y, bins=y_bins, color=color, alpha=alpha, density=True,
                     orientation="horizontal")
        axHistx.set_xlim(axScatter.get_xlim())
        axHisty.set_ylim(axScatter.get_ylim())
        axScatter.scatter(x, y, s=s, c=color, alpha=alpha, label=label)

    axScatter.set_xlim((x_lb, x_hb))
    axScatter.set_ylim((y_lb, y_hb))

    if not np.all(np.array([label is None for label in labels])):
        plt.legend(loc="upper right")
def __init__(self, fig=None, rotation=30, subplot=None, rect=None, aspect=80.5): r"""Create SkewT - logP plots. Parameters ---------- fig : matplotlib.figure.Figure, optional Source figure to use for plotting. If none is given, a new :class:`matplotlib.figure.Figure` instance will be created. rotation : float or int, optional Controls the rotation of temperature relative to horizontal. Given in degrees counterclockwise from x-axis. Defaults to 30 degrees. subplot : tuple[int, int, int] or `matplotlib.gridspec.SubplotSpec` instance, optional Controls the size/position of the created subplot. This allows creating the skewT as part of a collection of subplots. If subplot is a tuple, it should conform to the specification used for :meth:`matplotlib.figure.Figure.add_subplot`. The :class:`matplotlib.gridspec.SubplotSpec` can be created by using :class:`matplotlib.gridspec.GridSpec`. rect : tuple[float, float, float, float], optional Rectangle (left, bottom, width, height) in which to place the axes. This allows the user to place the axes at an arbitrary point on the figure. aspect : float, int, or 'auto', optional Aspect ratio (i.e. ratio of y-scale to x-scale) to maintain in the plot. Defaults to 80.5. Passing the string ``'auto'`` tells matplotlib to handle the aspect ratio automatically (this is not recommended for SkewT). """ if fig is None: import matplotlib.pyplot as plt figsize = plt.rcParams.get('figure.figsize', (7, 7)) fig = plt.figure(figsize=figsize) self._fig = fig if rect and subplot: raise ValueError( "Specify only one of `rect' and `subplot', but not both") elif rect: self.ax = fig.add_axes(rect, projection='skewx', rotation=rotation) else: if subplot is not None: # Handle being passed a tuple for the subplot, or a GridSpec instance try: len(subplot) except TypeError: subplot = (subplot, ) else: subplot = (1, 1, 1) self.ax = fig.add_subplot(*subplot, projection='skewx', rotation=rotation) # Set the yaxis as inverted with log scaling self.ax.set_yscale('log') # Override default ticking for log scaling self.ax.yaxis.set_major_formatter(ScalarFormatter()) self.ax.yaxis.set_major_locator(MultipleLocator(100)) self.ax.yaxis.set_minor_formatter(NullFormatter()) # Needed to make sure matplotlib doesn't freak out and create a bunch of ticks # Also takes care of inverting the y-axis self.ax.set_ylim(1050, 100) self.ax.yaxis.set_units(units.hPa) # Try to make sane default temperature plotting ticks self.ax.xaxis.set_major_locator(MultipleLocator(10)) self.ax.xaxis.set_units(units.degC) self.ax.set_xlim(-40, 50) self.ax.grid(True) self.mixing_lines = None self.dry_adiabats = None self.moist_adiabats = None # Maintain a reasonable ratio of data limits. Only works on Matplotlib >= 3.2 if matplotlib.__version__[:3] > '3.1': self.ax.set_aspect(aspect, adjustable='box')
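# A minimal usage sketch for the SkewT class whose constructor is documented
# above. Only __init__ is shown here, so the sketch draws directly on skew.ax;
# p, t, td are hypothetical pressure/temperature profiles for illustration.
import matplotlib.pyplot as plt
import numpy as np

p = np.linspace(1000, 100, 25)   # hPa, hypothetical sounding levels
t = np.linspace(25, -55, 25)     # degC, hypothetical temperature profile
td = t - 5                       # degC, hypothetical dewpoint profile

skew = SkewT(rotation=45)                # constructor documented above
skew.ax.plot(t, p, color='tab:red')      # temperature trace
skew.ax.plot(td, p, color='tab:green')   # dewpoint trace
plt.show()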
def plot(i12ind1, i12ind2, i12val, orbinit, orbfinal, thresh, s1index, s1value): try: import matplotlib.pyplot as plt import matplotlib.lines as mlines from matplotlib.ticker import NullFormatter except ImportError: if log.do_warning: log.warn('Skipping plots because matplotlib was not found.') return norb = orbfinal - orbinit orbitals = np.arange(orbinit, orbfinal) theta = 2 * np.pi * (orbitals - orbinit) / (norb) r = 22 * np.ones(norb, int) - 3.00 * ((orbitals - orbinit) % 3) plt.figure(figsize=(10, 5)) ax = plt.subplot(121, polar=True) ax.grid(False) ax.set_theta_zero_location("N") ax.set_theta_direction(-1) plt.plot(theta, r, 'o', markersize=12, alpha=0.2) for i in range(len(orbitals)): plt.annotate( i + 1 + orbinit, xy=(theta[i], r[i]), xytext=(0, 0), textcoords='offset points', ha='center', va='bottom', fontsize=8, fontweight='bold', ) ax.yaxis.set_data_interval(0, 22.5) ax.xaxis.set_major_formatter(NullFormatter()) ax.yaxis.set_major_formatter(NullFormatter()) legend = [] for ind in range(len(i12val)): if i12val[ind] >= thresh: if i12val[ind] >= 0.0001 and i12val[ind] < 0.001: plt.plot([ theta[i12ind1[ind] - orbinit], theta[i12ind2[ind] - orbinit] ], [r[i12ind1[ind] - orbinit], r[i12ind2[ind] - orbinit]], ':', lw=2, color='orange') if i12val[ind] >= 0.001 and i12val[ind] < 0.01: plt.plot([ theta[i12ind1[ind] - orbinit], theta[i12ind2[ind] - orbinit] ], [r[i12ind1[ind] - orbinit], r[i12ind2[ind] - orbinit]], '-.', lw=2, color='g') if i12val[ind] >= 0.01 and i12val[ind] < 0.1: plt.plot([ theta[i12ind1[ind] - orbinit], theta[i12ind2[ind] - orbinit] ], [r[i12ind1[ind] - orbinit], r[i12ind2[ind] - orbinit]], '--', lw=2, color='r') if i12val[ind] >= 0.1: plt.plot([ theta[i12ind1[ind] - orbinit], theta[i12ind2[ind] - orbinit] ], [r[i12ind1[ind] - orbinit], r[i12ind2[ind] - orbinit]], '-', lw=3, color='b') blue_line = mlines.Line2D([], [], color='blue', marker='', lw=3, ls='-', label='0.1') red_line = mlines.Line2D([], [], color='red', marker='', lw=2, ls='--', label='0.01') green_line = mlines.Line2D([], [], color='green', marker='', ls='-.', lw=2, label='0.001') orange_line = mlines.Line2D([], [], color='orange', marker='', ls=':', lw=2, label='0.0001') if thresh >= 0.0001 and thresh < 0.001: legend.append(blue_line) legend.append(red_line) legend.append(green_line) legend.append(orange_line) if thresh >= 0.001 and thresh < 0.01: legend.append(blue_line) legend.append(red_line) legend.append(green_line) if thresh >= 0.01 and thresh < 0.1: legend.append(blue_line) legend.append(red_line) if thresh >= 0.1: legend.append(blue_line) plt.legend(handles=legend, loc='center', fancybox=True, fontsize=10) plt.title('Mutual information') ax2 = plt.subplot(122) ax2.axis([orbinit, orbfinal, 0, 0.71]) ax2.vlines(s1index, [0], s1value, color='r', linewidth=2, linestyle='-') plt.ylabel('single-orbital entropy') plt.xlabel('Orbital index') plt.plot(s1index, s1value, 'ro', markersize=8) plt.savefig('orbital_entanglement.png', dpi=300)
def vis_accuracy(X, Y, title1='', title2='', xlab='', ylab=''):
    '''
    Args
        X = a list of tuples where a tuple = (X_value, accuracy)
        Y = a list of tuples where a tuple = (Y_value, accuracy)
    '''
    # data for the scatter plot
    x = list(map(lambda x: x[0], X))
    y = list(map(lambda x: x[0], Y))

    # data for the best fit plot
    x2, acc_x = list(zip(*sorted(X, key=lambda x: x[0])))
    y2, acc_y = list(zip(*sorted(Y, key=lambda x: x[0])))

    nullfmt = NullFormatter()  # no labels

    # definitions for the axes
    left, width = 0.1, 0.65
    bottom, height = 0.1, 0.65
    bottom_h = left_h = left + width + 0.02

    rect_scatter = [left, bottom, width, height]
    rect_linex = [left, bottom_h, width, 0.2]
    rect_liney = [left_h, bottom, 0.2, height]

    # start with a rectangular Figure
    plt.figure(1, figsize=(8, 8))

    axScatter = plt.axes(rect_scatter)
    axLinex = plt.axes(rect_linex)  # , sharey=axScatter)
    axLiney = plt.axes(rect_liney)  # , sharex=axScatter)

    # no labels
    axLinex.xaxis.set_major_formatter(nullfmt)
    axLiney.yaxis.set_major_formatter(nullfmt)

    # the scatter plot:
    axScatter.scatter(x, y)
    axScatter.grid(b=True, which='major', color='gray', linestyle='--')
    axScatter.set_xlabel(xlab)
    axScatter.set_ylabel(ylab)

    # for plotting the best fit accuracy curve
    num_pts = 100
    order = 3

    coeffs_x = np.polyfit(x2, acc_x, order)
    x3 = np.arange(num_pts + 1) * (np.max(x2) - np.min(x2)) / num_pts + np.min(x2)
    fit_x = np.polyval(coeffs_x, x3)

    coeffs_y = np.polyfit(y2, acc_y, order)
    y3 = np.arange(num_pts + 1) * (np.max(y2) - np.min(y2)) / num_pts + np.min(y2)
    fit_y = np.polyval(coeffs_y, y3)

    # plot the curve and place dots on the curve
    axLinex.plot(x3, fit_x)
    axLinex.scatter(x2, acc_x)
    axLinex.grid(b=True, which='major', color='gray', linestyle='--')
    axLinex.set_title(title1)

    axLiney.plot(fit_y, y3)
    axLiney.scatter(acc_y, y2)
    axLiney.grid(b=True, which='major', color='gray', linestyle='--')
    axLiney.set_title(title2)

    plt.show()
def draw_bace_example_graph(): # active/inactive example for i in range(8, 11): results = [] with open( "../../experiment/figure/rotation_single_bace_{}_x.csv".format( i)) as file: reader = csv.reader(file) for row in reader: result = [float(r) for r in row ] # ex [0, 45, 90, 135, 180, 225, 270, 315] results.append([ *result[len(result) // 2:], *result[:len(result) // 2 + 1] ]) active_results = results[:len(results) // 2] inactive_results = results[len(results) // 2:] print(active_results) results = [] with open("../../experiment/figure/rotation_single_bacer_{}_x.csv". format(i)) as file: reader = csv.reader(file) for row in reader: result = [float(r) for r in row ] # ex [0, 45, 90, 135, 180, 225, 270, 315] results.append([ *result[len(result) // 2:], *result[:len(result) // 2 + 1] ]) active_results2 = results[:len(results) // 2] inactive_results2 = results[len(results) // 2:] print(active_results2) major_tick = MultipleLocator(9) major_formatter = FixedFormatter([ "", "-180", "-135", "-90", "-45", "0", "+45", "+90", "+135", "+180" ]) minor_tick = MultipleLocator(9) x = np.arange(len(active_results[0])) for j in range(len(active_results)): # plt.figure(figsize=(8, 2.5)) plt.figure(figsize=(17, 2.5)) ax = plt.subplot(1, 1, 1) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) # Active plt.plot(x, active_results[j], color="#000000", linewidth=2, linestyle="solid") plt.plot(x, active_results2[j], color="#000000", linewidth=2, linestyle="dashed") # Left ticks ax.xaxis.set_major_locator(major_tick) ax.xaxis.set_major_formatter(major_formatter) ax.xaxis.set_minor_locator(minor_tick) ax.xaxis.set_minor_formatter(NullFormatter()) plt.ylim(0, 1) plt.yticks(np.arange(0, 1.01, 0.5), ("0.0", "0.5", "1.0")) fig_name = "../../experiment/figure/ex/rotation_single_trial{}_a{}_x.png".format( i, j) plt.savefig(fig_name, dpi=600) plt.clf() print("Saved figure on {}".format(fig_name)) # plt.figure(figsize=(8, 2.5)) plt.figure(figsize=(17, 2.5)) ax = plt.subplot(1, 1, 1) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) # Inactive plt.plot(x, inactive_results[j], color="#000000", linewidth=2, linestyle="solid") plt.plot(x, inactive_results2[j], color="#000000", linewidth=2, linestyle="dashed") # Left ticks ax.xaxis.set_major_locator(major_tick) ax.xaxis.set_major_formatter(major_formatter) ax.xaxis.set_minor_locator(minor_tick) ax.xaxis.set_minor_formatter(NullFormatter()) plt.ylim(0, 1) plt.yticks(np.arange(0, 1.01, 0.5), ("0.0", "0.5", "1.0")) fig_name = "../../experiment/figure/ex/rotation_single_trial{}_i{}_x.png".format( i, j) plt.savefig(fig_name, dpi=600) plt.clf() print("Saved figure on {}".format(fig_name))
def test_plot_compare_methods(self): """ """ if 'CUDA_VISIBLE_DEVICES' not in os.environ: os.environ['CUDA_VISIBLE_DEVICES'] = '0' if 'PORT' not in os.environ: os.environ['PORT'] = '6006' if 'TIME_STR' not in os.environ: os.environ['TIME_STR'] = '0' if utils.is_debugging() else '1' # func name assert sys._getframe().f_code.co_name.startswith('test_') command = sys._getframe().f_code.co_name[5:] class_name = self.__class__.__name__[7:] \ if self.__class__.__name__.startswith('Testing') \ else self.__class__.__name__ outdir = f'results/{class_name}/{command}' from datetime import datetime TIME_STR = bool(int(os.getenv('TIME_STR', 0))) time_str = datetime.now().strftime("%Y%m%d-%H_%M_%S_%f")[:-3] outdir = outdir if not TIME_STR else (outdir + '_' + time_str) print(outdir) import collections, shutil shutil.rmtree(outdir, ignore_errors=True) os.makedirs(outdir, exist_ok=True) from collections import OrderedDict from functools import partial from time import time import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib.ticker import NullFormatter from sklearn import manifold, datasets # Next line to silence pyflakes. This import is needed. Axes3D n_points = 1000 X, color = datasets.make_s_curve(n_points, random_state=0) n_neighbors = 10 n_components = 2 # Create figure fig = plt.figure(figsize=(15, 8)) fig.suptitle("Manifold Learning with %i points, %i neighbors" % (1000, n_neighbors), fontsize=14) # Add 3d scatter plot ax = fig.add_subplot(251, projection='3d') ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=plt.cm.Spectral) ax.view_init(4, -72) # Set-up manifold methods LLE = partial(manifold.LocallyLinearEmbedding, n_neighbors, n_components, eigen_solver='auto') methods = OrderedDict() methods['LLE'] = LLE(method='standard') methods['LTSA'] = LLE(method='ltsa') methods['Hessian LLE'] = LLE(method='hessian') methods['Modified LLE'] = LLE(method='modified') methods['Isomap'] = manifold.Isomap(n_neighbors, n_components) methods['MDS'] = manifold.MDS(n_components, max_iter=100, n_init=1) methods['SE'] = manifold.SpectralEmbedding(n_components=n_components, n_neighbors=n_neighbors) methods['t-SNE'] = manifold.TSNE(n_components=n_components, init='pca', random_state=0) # Plot results for i, (label, method) in enumerate(methods.items()): t0 = time() Y = method.fit_transform(X) t1 = time() print("%s: %.2g sec" % (label, t1 - t0)) ax = fig.add_subplot(2, 5, 2 + i + (i > 3)) ax.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral) ax.set_title("%s (%.2g sec)" % (label, t1 - t0)) ax.xaxis.set_major_formatter(NullFormatter()) ax.yaxis.set_major_formatter(NullFormatter()) ax.axis('tight') plt.show() pass
def draw_single_cluster_plot(plotFilePath, dots):
    # print("# creating data from dots")
    X = []
    Y = []
    for x, y in dots:
        X.append(float(x))
        Y.append(float(y))

    nullfmt = NullFormatter()  # no labels

    # print("# definitions for the axes")
    left, width = 0.1, 0.65
    bottom, height = 0.1, 0.65
    bottom_h = left_h = left + width + 0.02

    rect_scatter = [left, bottom, width, height]
    rect_histx = [left, bottom_h, width, 0.2]
    rect_histy = [left_h, bottom, 0.2, height]

    # print("start with a rectangular Figure")
    plt.figure(1, figsize=(8, 8))

    axScatter = plt.axes(rect_scatter)
    axHistx = plt.axes(rect_histx)
    axHisty = plt.axes(rect_histy)

    # no labels
    axHistx.xaxis.set_major_formatter(nullfmt)
    axHisty.yaxis.set_major_formatter(nullfmt)

    # the scatter plot:
    # axScatter.scatter(X, Y)
    axScatter.plot(X, Y, '.')

    # now determine nice limits by hand:
    # binwidth = 0.25
    nbBins = 60  # needed below for the bin widths (was commented out)
    # xymax = np.max([np.max(np.fabs(X)), np.max(np.fabs(Y))])
    # xymin = np.min([np.min(np.fabs(X)), np.min(np.fabs(Y))])
    xmax = np.max(X)
    ymax = np.max(Y)
    xmin = np.min(X)
    ymin = np.min(Y)
    # fixed size for the histograms
    # xmin=0; xmax=score_max
    # ymin=0; ymax=rmsd_max
    # lim = (int(xymax/binwidth) + 1) * binwidth
    # axScatter.set_xlim((-lim, lim))
    # axScatter.set_ylim((-lim, lim))
    axScatter.set_xlim((xmin * .9, xmax * 1.1))
    axScatter.set_ylim((ymin * .9, ymax * 1.1))

    # bins = np.arange(-lim, lim + binwidth, binwidth)
    binwidth = (xmax * 1.1 - xmin * .9) / nbBins
    xBins = np.arange(xmin * .9, xmax * 1.1 + binwidth, binwidth)
    binwidth = (ymax * 1.1 - ymin * .9) / nbBins
    yBins = np.arange(ymin * .9, ymax * 1.1 + binwidth, binwidth)

    axHistx.hist(X, bins=xBins)
    axHisty.hist(Y, bins=yBins, orientation='horizontal')

    # making histograms
    axHistx.set_xlim(axScatter.get_xlim())
    axHisty.set_ylim(axScatter.get_ylim())

    print("# save plot")
    plt.savefig(plotFilePath)
    plt.clf()
def draw_example_graph(dataset, trial_path): results = [] with open(trial_path + "/rotation_single_x.csv") as file: reader = csv.reader(file) for row in reader: result = [float(r) for r in row] # ex [0, 45, 90, 135, 180, 225, 270, 315] results.append( [*result[len(result) // 2:], *result[:len(result) // 2 + 1]]) major_tick = MultipleLocator(18) major_formatter = FixedFormatter(["", "-180", "-90", "0", "+90", "+180"]) minor_tick = MultipleLocator(9) x = np.arange(len(results[0])) # Draw figure for j in range(0, min(len(results), 5)): if "bace" in trial_path or "hiv" in trial_path: plt.figure(figsize=(8, 2.5)) ax = plt.subplot(1, 1, 1) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) plt.plot(x, results[j], color="#000000", linewidth=2) # Left ticks ax.xaxis.set_major_locator(major_tick) ax.xaxis.set_major_formatter(major_formatter) ax.xaxis.set_minor_locator(minor_tick) ax.xaxis.set_minor_formatter(NullFormatter()) plt.ylim(0, 1) plt.yticks(np.arange(0, 1.01, 0.5), ("0.0", "0.5", "1.0")) fig_name = "../../experiment/figure/ex/rotation_single_{}_{}_x.png".format( dataset, j) plt.savefig(fig_name, dpi=600) plt.clf() print("Saved figure on {}".format(fig_name)) else: # Figure plt.figure(figsize=(8, 2.5)) ax = plt.subplot(1, 1, 1) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) y = results[j] mean_y = np.average(y) ylim = (mean_y - 1.5, mean_y + 1.5) plt.plot(x, y, color="#000000", linewidth=2) # Ticks ax.xaxis.set_major_locator(major_tick) ax.xaxis.set_major_formatter(major_formatter) ax.xaxis.set_minor_locator(minor_tick) ax.xaxis.set_minor_formatter(NullFormatter()) plt.ylim(ylim) fig_name = "../../experiment/figure/ex/rotation_single_{}_{}_x.png".format( dataset, j) plt.savefig(fig_name, dpi=600) plt.clf() print("Saved figure on {}".format(fig_name))
def plot_pair( ax, infdata_group, numvars, figsize, textsize, kind, scatter_kwargs, kde_kwargs, hexbin_kwargs, gridsize, colorbar, divergences, diverging_mask, divergences_kwargs, flat_var_names, backend_kwargs, marginal_kwargs, show, marginals, point_estimate, point_estimate_kwargs, point_estimate_marker_kwargs, reference_values, reference_values_kwargs, ): """Matplotlib pairplot.""" if backend_kwargs is None: backend_kwargs = {} backend_kwargs = { **backend_kwarg_defaults(), **backend_kwargs, } backend_kwargs.pop("constrained_layout") scatter_kwargs = matplotlib_kwarg_dealiaser(scatter_kwargs, "scatter") scatter_kwargs.setdefault("marker", ".") scatter_kwargs.setdefault("lw", 0) # Sets the default zorder higher than zorder of grid, which is 0.5 scatter_kwargs.setdefault("zorder", 0.6) if kde_kwargs is None: kde_kwargs = {} if hexbin_kwargs is None: hexbin_kwargs = {} hexbin_kwargs.setdefault("mincnt", 1) divergences_kwargs = matplotlib_kwarg_dealiaser(divergences_kwargs, "plot") divergences_kwargs.setdefault("marker", "o") divergences_kwargs.setdefault("markeredgecolor", "k") divergences_kwargs.setdefault("color", "C1") divergences_kwargs.setdefault("lw", 0) if marginal_kwargs is None: marginal_kwargs = {} point_estimate_kwargs = matplotlib_kwarg_dealiaser(point_estimate_kwargs, "fill_between") if kind != "kde": kde_kwargs.setdefault("contourf_kwargs", {}) kde_kwargs["contourf_kwargs"].setdefault("alpha", 0) kde_kwargs.setdefault("contour_kwargs", {}) kde_kwargs["contour_kwargs"].setdefault("colors", "k") if reference_values: reference_values_copy = {} label = [] for variable in list(reference_values.keys()): if " " in variable: variable_copy = variable.replace(" ", "\n", 1) else: variable_copy = variable label.append(variable_copy) reference_values_copy[variable_copy] = reference_values[variable] difference = set(flat_var_names).difference(set(label)) if difference: warn = [diff.replace("\n", " ", 1) for diff in difference] warnings.warn( "Argument reference_values does not include reference value for: {}".format( ", ".join(warn) ), UserWarning, ) reference_values_kwargs = matplotlib_kwarg_dealiaser(reference_values_kwargs, "plot") reference_values_kwargs.setdefault("color", "C3") reference_values_kwargs.setdefault("marker", "o") point_estimate_marker_kwargs = matplotlib_kwarg_dealiaser( point_estimate_marker_kwargs, "scatter" ) point_estimate_marker_kwargs.setdefault("marker", "s") point_estimate_marker_kwargs.setdefault("color", "C1") # pylint: disable=too-many-nested-blocks if numvars == 2: (figsize, ax_labelsize, _, xt_labelsize, linewidth, markersize) = _scale_fig_size( figsize, textsize, numvars - 1, numvars - 1 ) backend_kwargs.setdefault("figsize", figsize) marginal_kwargs.setdefault("plot_kwargs", {}) marginal_kwargs["plot_kwargs"].setdefault("linewidth", linewidth) point_estimate_marker_kwargs.setdefault("s", markersize + 50) # Flatten data x = infdata_group[0].flatten() y = infdata_group[1].flatten() if ax is None: if marginals: # Instantiate figure and grid widths = [2, 2, 2, 1] heights = [1.4, 2, 2, 2] fig = plt.figure(**backend_kwargs) grid = plt.GridSpec( 4, 4, hspace=0.1, wspace=0.1, figure=fig, width_ratios=widths, height_ratios=heights, ) # Set up main plot ax = fig.add_subplot(grid[1:, :-1]) # Set up top KDE ax_hist_x = fig.add_subplot(grid[0, :-1], sharex=ax) ax_hist_x.set_yticks([]) # Set up right KDE ax_hist_y = fig.add_subplot(grid[1:, -1], sharey=ax) ax_hist_y.set_xticks([]) ax_return = np.array([[ax_hist_x, None], [ax, ax_hist_y]]) for val, ax_, rotate in ((x, 
ax_hist_x, False), (y, ax_hist_y, True)): plot_dist(val, textsize=xt_labelsize, rotated=rotate, ax=ax_, **marginal_kwargs) # Personalize axes ax_hist_x.tick_params(labelleft=False, labelbottom=False) ax_hist_y.tick_params(labelleft=False, labelbottom=False) else: fig, ax = plt.subplots(numvars - 1, numvars - 1, **backend_kwargs) else: if marginals: assert ax.shape == (numvars, numvars) if ax[0, 1] is not None and ax[0, 1].get_figure() is not None: ax[0, 1].remove() ax_return = ax ax_hist_x = ax[0, 0] ax_hist_y = ax[1, 1] ax = ax[1, 0] for val, ax_, rotate in ((x, ax_hist_x, False), (y, ax_hist_y, True)): plot_dist(val, textsize=xt_labelsize, rotated=rotate, ax=ax_, **marginal_kwargs) else: ax = np.atleast_2d(ax)[0, 0] if "scatter" in kind: ax.plot(infdata_group[0], infdata_group[1], **scatter_kwargs) if "kde" in kind: plot_kde(infdata_group[0], infdata_group[1], ax=ax, **kde_kwargs) if "hexbin" in kind: hexbin = ax.hexbin( infdata_group[0], infdata_group[1], gridsize=gridsize, **hexbin_kwargs, ) ax.grid(False) if kind == "hexbin" and colorbar: cbar = ax.figure.colorbar(hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], ax=ax) cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize) if divergences: ax.plot( infdata_group[0][diverging_mask], infdata_group[1][diverging_mask], **divergences_kwargs, ) if point_estimate: pe_x = calculate_point_estimate(point_estimate, x) pe_y = calculate_point_estimate(point_estimate, y) if marginals: ax_hist_x.axvline(pe_x, **point_estimate_kwargs) ax_hist_y.axhline(pe_y, **point_estimate_kwargs) ax.axvline(pe_x, **point_estimate_kwargs) ax.axhline(pe_y, **point_estimate_kwargs) ax.scatter(pe_x, pe_y, **point_estimate_marker_kwargs) if reference_values: ax.plot( reference_values_copy[flat_var_names[0]], reference_values_copy[flat_var_names[1]], **reference_values_kwargs, ) ax.set_xlabel("{}".format(flat_var_names[0]), fontsize=ax_labelsize, wrap=True) ax.set_ylabel("{}".format(flat_var_names[1]), fontsize=ax_labelsize, wrap=True) ax.tick_params(labelsize=xt_labelsize) else: not_marginals = int(not marginals) num_subplot_cols = numvars - not_marginals max_plots = ( num_subplot_cols ** 2 if rcParams["plot.max_subplots"] is None else rcParams["plot.max_subplots"] ) cols_to_plot = np.sum(np.arange(1, num_subplot_cols + 1).cumsum() <= max_plots) if cols_to_plot < num_subplot_cols: vars_to_plot = cols_to_plot warnings.warn( "rcParams['plot.max_subplots'] ({max_plots}) is smaller than the number " "of resulting pair plots with these variables, generating only a " "{side}x{side} grid".format(max_plots=max_plots, side=vars_to_plot), UserWarning, ) else: vars_to_plot = numvars - not_marginals (figsize, ax_labelsize, _, xt_labelsize, _, markersize) = _scale_fig_size( figsize, textsize, vars_to_plot, vars_to_plot ) backend_kwargs.setdefault("figsize", figsize) point_estimate_marker_kwargs.setdefault("s", markersize + 50) if ax is None: fig, ax = plt.subplots( vars_to_plot, vars_to_plot, **backend_kwargs, ) hexbin_values = [] for i in range(0, vars_to_plot): var1 = infdata_group[i] for j in range(0, vars_to_plot): var2 = infdata_group[j + not_marginals] if i > j: if ax[j, i].get_figure() is not None: ax[j, i].remove() continue elif i == j and marginals: loc = "right" plot_dist(var1, ax=ax[i, j], **marginal_kwargs) else: if i == j: loc = "left" if "scatter" in kind: ax[j, i].plot(var1, var2, **scatter_kwargs) if "kde" in kind: plot_kde( var1, var2, ax=ax[j, i], **kde_kwargs, ) if "hexbin" in kind: ax[j, i].grid(False) hexbin = ax[j, i].hexbin(var1, var2, 
gridsize=gridsize, **hexbin_kwargs) if divergences: ax[j, i].plot( var1[diverging_mask], var2[diverging_mask], **divergences_kwargs ) if kind == "hexbin" and colorbar: hexbin_values.append(hexbin.norm.vmin) hexbin_values.append(hexbin.norm.vmax) divider = make_axes_locatable(ax[-1, -1]) cax = divider.append_axes(loc, size="7%", pad="5%") cbar = fig.colorbar( hexbin, ticks=[hexbin.norm.vmin, hexbin.norm.vmax], cax=cax ) cbar.ax.set_yticklabels(["low", "high"], fontsize=ax_labelsize) if point_estimate: pe_x = calculate_point_estimate(point_estimate, var1) pe_y = calculate_point_estimate(point_estimate, var2) ax[j, i].axvline(pe_x, **point_estimate_kwargs) ax[j, i].axhline(pe_y, **point_estimate_kwargs) if marginals: ax[j - 1, i].axvline(pe_x, **point_estimate_kwargs) pe_last = calculate_point_estimate(point_estimate, infdata_group[-1]) ax[-1, -1].axvline(pe_last, **point_estimate_kwargs) ax[j, i].scatter(pe_x, pe_y, **point_estimate_marker_kwargs) if reference_values: x_name = flat_var_names[i] y_name = flat_var_names[j + not_marginals] if x_name and y_name not in difference: ax[j, i].plot( reference_values_copy[x_name], reference_values_copy[y_name], **reference_values_kwargs, ) if j != vars_to_plot - 1: ax[j, i].axes.get_xaxis().set_major_formatter(NullFormatter()) else: ax[j, i].set_xlabel( "{}".format(flat_var_names[i]), fontsize=ax_labelsize, wrap=True ) if i != 0: ax[j, i].axes.get_yaxis().set_major_formatter(NullFormatter()) else: ax[j, i].set_ylabel( "{}".format(flat_var_names[j + not_marginals]), fontsize=ax_labelsize, wrap=True, ) ax[j, i].tick_params(labelsize=xt_labelsize) if backend_show(show): plt.show() if marginals and numvars == 2: return ax_return return ax
def doWork(args): """ Main wrapper""" sorted_dates = sorted(viral_samples.values()) # open a file of contig stats and get the contig lengths, rel abundance contig_stats = defaultdict(dict) with open(args.contigs) as fp: callback = None if args.filter is not None: callback = fx_include for name, seq, qual in fx_parse(fp, callback=callback, headers=args.filter): contig_stats[name]['length'] = len(seq) contig_stats[name]['spacers'] = [] contig_stats[name]['snps'] = [] contig_stats[name]['rel_abs'] = [] # open the vcf file containing SNPs get the timepoints that the SNP is # present vcf_reader = vcf.Reader(open(args.snps)) for record in vcf_reader: if record.QUAL is not None and int(record.QUAL) < int( args.snp_quality): continue if record.CHROM not in contig_stats: continue for sample in record.samples: if sample['GT'] != '0/0' and sample['GT'] != './.': try: contig_stats[record.CHROM]['snps'].append([ record.POS, viral_samples[os.path.splitext(sample.sample)[0]] ]) except: contig_stats[record.CHROM]['snps'] = [] contig_stats[record.CHROM]['snps'].append([ record.POS, viral_samples[os.path.splitext(sample.sample)[0]] ]) # open the Spacer file get the positions and timepoints with open(args.spacers) as sp: for line in sp: line = line.rstrip() fields = line.split('\t') if fields[0].startswith('M8') or fields[1] not in contig_stats: continue ret = get_spacer_positions(fields) try: contig_stats[fields[1]]['spacers'].append( [ret.start, ret.timepoint]) except: contig_stats[fields[1]]['spacers'] = [] contig_stats[fields[1]]['spacers'].append( [ret.start, ret.timepoint]) # open relative abundance file and add that data in with open(args.rel_abs) as ra: rel_abs_header = True header = [] times = [] for line in ra: line = line.rstrip() fields = line.split('\t') if rel_abs_header: times = [viral_samples[x] for x in fields[1:]] rel_abs_header = False elif fields[0] in contig_stats: contig_stats[fields[0]]['rel_abs'] = zip(fields[1:], times) #----- # make a 2d plot nullfmt = NullFormatter() for contig_name, data in contig_stats.items(): # definitions for the axes left, width = 0.1, 0.65 bottom, height = 0.1, 0.65 left_h = left + width + 0.02 rect_scatter = [left, bottom, width, height] rect_histy = [left_h, bottom, 0.2, height] fig = plt.figure() axScatter = fig.add_axes(rect_scatter) axHisty = fig.add_axes(rect_histy) #axScatter = plt.axes(rect_scatter) #axHisty = plt.axes(rect_histy) axHisty.set_xlabel('relative abundance') axHisty.yaxis.set_major_formatter(nullfmt) axHisty.set_ylim((mpl.dates.date2num(sorted_dates[0]), mpl.dates.date2num(sorted_dates[-1]))) axHisty.xaxis.set_major_locator(MaxNLocator(4)) #phi_rel_abundance = np.random.randn(len(sorted_dates)) #ra_point = [x[0] for x in data['rel_abs']] ra_time = [x[1] for x in data['rel_abs']] ra_time = mpl.dates.date2num(ra_time) # sorted(ra_time) sorted_indexes = np.argsort(ra_time) ra_point = [] for i in sorted_indexes: ra_point.append(data['rel_abs'][i][0]) axHisty.plot_date(ra_point, sorted(ra_time), xdate=False, ydate=True, color='0.7', marker=' ', linestyle='--') #ax.xaxis.set_ticks_position('top') axScatter.set_xlim([0, data['length']]) axScatter.set_ylim((mpl.dates.date2num(sorted_dates[0]), mpl.dates.date2num(sorted_dates[-1]))) axScatter.set_xlabel('genome position (bp)') axScatter.set_title(contig_name) sp_dates = [x[1] for x in data['spacers']] sp_dates = mpl.dates.date2num(sp_dates) sp_points = [x[0] for x in data['spacers']] axScatter.plot_date(sp_points, sp_dates, color='r', alpha=0.5, xdate=False, ydate=True) sn_dates = [x[1] for x in 
data['snps']] sn_dates = mpl.dates.date2num(sn_dates) sn_points = [x[0] for x in data['snps']] axScatter.plot_date(sn_points, sn_dates, color='0.7', marker='.', xdate=False, ydate=True) axScatter.tick_params(axis='y', labelsize='small') axHisty.tick_params(axis='y', labelsize='small') # # Change the formatting of the xlabels to make them pretty # labels = axScatter.get_xticklabels() for label in labels: label.set_rotation(30) label.set_horizontalalignment('right') label.set_size('small') labels = axHisty.get_xticklabels() for label in labels: label.set_rotation(30) label.set_horizontalalignment('right') label.set_size('small') plt.savefig(os.path.join(args.output, contig_name + ".png"), dpi=300, format='png') #----- # clean up! plt.close(fig) #plt.close(axHisty) del fig return 0
# (fragment) the first line below continues a call (e.g. model/optimizer setup) from earlier context
             lr=0.00005)

n = 0
for (masked, mask), ori in tqdm(test_generator):
    # Run predictions for this batch of images
    pred_img = model.predict([masked, mask])
    pred_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')

    # Clear current output and display test images
    for i in range(len(ori)):
        _, axes = plt.subplots(1, 2, figsize=(10, 5))
        axes[0].imshow(masked[i, :, :, :])
        axes[1].imshow(pred_img[i, :, :, :] * 1.)
        axes[0].set_title('Masked Image')
        axes[1].set_title('Predicted Image')
        axes[0].xaxis.set_major_formatter(NullFormatter())
        axes[0].yaxis.set_major_formatter(NullFormatter())
        axes[1].xaxis.set_major_formatter(NullFormatter())
        axes[1].yaxis.set_major_formatter(NullFormatter())
        plt.savefig(r'data/test_samples/img_{}_{}.png'.format(i, pred_time))
        plt.close()

    n += 1
    # Only create predictions for about 100 images
    if n > 100:
        break

print("finish")
def make_plot(self, classifier, show, save, compression_keys, topk=1, make_title=False, xmin=0.032, xmax=None, ymin=0, ymax=100, figsize=(6, 3.75), logscale=True, font_name='times_roman', legend_loc='lower right', bpp_min=None): if compression_keys is None: raise ValueError('compression_keys must not be None') if not isinstance(compression_keys, list): compression_keys = list([compression_keys]) if int(topk) != 1 and int(topk) != 5: raise ValueError('topk must either be `1` or `5`') topkacc_kw = LogsParser.TOP1ACC_KW if int( topk) == 1 else LogsParser.TOP5ACC_KW topkacc_idx = 1 if int(topk) == 1 else 2 configs = { ckey: self.load_config(ckey, classifier) for ckey in compression_keys } csv_original = os.path.join( self._csv_dir, CSV_FILE_BASE.format('original', classifier)) corrupted_logfiles = [] if os.path.isfile(csv_original): acc_data_lossless = np.array(read_csv(csv_original), dtype=np.float32) else: print('WARNING! {} not found.'.format(csv_original)) corrupted_logfiles.append('original') acc_data_lossless = None # ========= parse csv files parsed_data = {} for key in compression_keys: cfg = configs[key] csv_file = cfg['csv_file'] if not os.path.isfile(csv_file): print('WARNING! {} not found.'.format(csv_file)) corrupted_logfiles.append(key) continue acc_data = np.array(read_csv(csv_file), dtype=np.float32) # sort data acc_data = acc_data[acc_data[:, 0].argsort()] if bpp_min is not None: include_idx = np.where(acc_data[:, 0] >= float(bpp_min))[0] acc_data = acc_data[include_idx, :] parsed_data[key] = acc_data[:, 0], 100.0 * acc_data[:, topkacc_idx] if len(parsed_data) == 0: print('parsed_data empty') return compression_keys = list( [k for k in compression_keys if k not in corrupted_logfiles]) configs = {k: configs[k] for k in compression_keys} compression_colors = self.keys_to_color([k for k in compression_keys]) # ========= make plot # determine plot boundaries all_bpp_data = np.unique( np.concatenate([data[0] for data in parsed_data.values()])) if xmax is None: xmax = all_bpp_data.max() + 2.0 x_lim = xmin, xmax y_lim = ymin, ymax # setup fig fig = plt.figure(figsize=figsize) # plot data ax = plt.gca() plot_func = ax.semilogx if logscale else ax.plot for key in compression_keys: bpp_array, acc_array = parsed_data[key] cfg = configs[key] plot_func(bpp_array, acc_array, lw=cfg['lw'], color=compression_colors[key], label=COMPRESSION_NAMES[key], marker=cfg['marker'], markersize=3 * cfg['lw'], linestyle=cfg['linestyle']) if acc_data_lossless is not None: ax.axhline(100.0 * acc_data_lossless[0, topkacc_idx], xmin=0, xmax=24, color='dimgrey', lw=0.75, linestyle='--') plot_func(acc_data_lossless[:, 0], 100.0 * acc_data_lossless[:, topkacc_idx], marker='^', markersize=6, color='red') # format plot plt.xlim(x_lim) plt.ylim(y_lim) ax.set_xlabel('bpp', fontproperties=get_font(font_name, FONTSIZES.Large)) ax.set_ylabel('Validation Accuracy (%)', fontproperties=get_font(font_name, FONTSIZES.Large)) ax.grid(True, color=(0.91, 0.91, 0.91), linewidth=0.5) ax.yaxis.set_minor_locator(MultipleLocator(5)) ax.yaxis.set_major_locator(MultipleLocator(10)) ax.yaxis.set_major_formatter(FormatStrFormatter('%.1d')) if logscale: ax.xaxis.set_minor_locator( LogLocator(base=2, subs=(1.2, 1.4, 1.6, 1.8))) ax.xaxis.set_minor_formatter(NullFormatter()) ax.xaxis.set_major_locator(LogLocator(base=2)) ax.xaxis.set_major_formatter(FormatStrFormatter('%.3f')) else: ax.xaxis.set_major_locator(MultipleLocator(0.125)) ax.xaxis.set_major_formatter(FormatStrFormatter('%.3f')) ax.xaxis.set_minor_locator(MultipleLocator(0.125)) 
ax.tick_params(which='minor', width=0.4) ax.set_facecolor(FACECOLOR) for label in ax.get_xticklabels(): label.set_fontproperties(get_font(font_name, FONTSIZES.large)) for label in ax.get_yticklabels(): label.set_fontproperties(get_font(font_name, FONTSIZES.large)) for spine in ['top', 'bottom', 'left', 'right']: ax.spines[spine].set_color('black') # legend legend_labels = [COMPRESSION_NAMES[k] for k in compression_keys] legend_labels += ['Original'] legend = plt.legend( **self._get_legend_kwargs(configs=[(k, configs[k]) for k in compression_keys], labels=legend_labels, legend_loc=legend_loc, font_name=font_name, compression_colors=compression_colors)) ax.add_artist(legend) # title if make_title: plt.suptitle(t='Validation Accuracy on {}, Top-{}, %'.format( DATASET_NAMES[self._dataset], 1 if topkacc_kw == LogsParser.TOP1ACC_KW else 5), fontproperties=get_font(font_name, FONTSIZES.Large)) plt.title(CLASSIFIER_NAMES[classifier], fontproperties=get_font(font_name, FONTSIZES.large)) plt.subplots_adjust(left=0.08, right=0.97, bottom=0.12, top=0.86) else: fig.tight_layout() if show: plt.show() if save: if not os.path.exists(self._plots_save_dir): os.makedirs(self._plots_save_dir) save_as = '{}_accuracy_{}_{}.png'.format(self._dataset, classifier, topkacc_kw) fig.savefig(os.path.join(self._plots_save_dir, save_as), dpi=200) print('plot saved as {}'.format( os.path.join(self._plots_save_dir, save_as))) plt.close(fig)
def on_draw(self, gr_xmin=None, gr_xmax=None, gr_ymin=None, gr_ymax=None, iq_begin=0): """Redraws the figure""" self.fig.clear() if gr_xmin==None : xmin = self.arrx[0] else : xmin = gr_xmin if gr_xmax==None : xmax = self.arrx[-1] # Last element else : xmax = gr_xmax if xmin==xmax : xmax=xmin+1 # protection against equal limits wwidth = 0.26 wheight = 0.24 self.list_of_axgr = [] #iq_begin = 5 iq_list = self.get_iq_list(iq_begin) #print 'iq_list:', iq_list, ' at self.iq_max =',self.arr_q.shape[0] for iwin, iq in enumerate(iq_list) : iwin_row = int(iwin/3) iwin_col = int(iwin%3) wx0 = 0.08 + iwin_col*0.32 wy0 = 0.70 - iwin_row*0.3 xarr = self.arrx yarr = self.arr_g2[:,iq] q_ave = self.arr_q[iq] q_str = 'q(%d)=%8.4f' % (iq, q_ave) if gr_ymin==None : ymin = min(yarr) else : ymin = gr_ymin if gr_ymax==None : ymax = max(yarr) else : ymax = gr_ymax axgr = self.fig.add_axes([wx0, wy0, wwidth, wheight]) if self.logIsOn : axgr.set_xscale('log') else : axgr.xaxis.set_major_locator(MaxNLocator(5)) axgr.plot(xarr, yarr, '-bo')# '-ro' axgr.set_xlim(xmin,xmax) axgr.set_ylim(ymin,ymax) axgr.set_title(q_str, fontsize=10, color='b') axgr.tick_params(axis='both', which='major', labelsize=8) axgr.yaxis.set_major_locator(MaxNLocator(5)) axgr.grid(self.gridIsOn) if iwin_col == 0 : axgr.set_ylabel(r'$g_{2}$', fontsize=14) if iwin_row == 2 : axgr.set_xlabel(r'$\tau$ (in number of frames)', fontsize=12) else : axgr.xaxis.set_major_formatter(NullFormatter()) self.list_of_axgr.append(axgr) self.canvas.draw()
def plot_pnet_vs_dense_with_ratio(ax, c, label, plot_ratio=False):
    sns.set_color_codes('muted')
    current_palette = sns.color_palette()
    color = current_palette[3]

    sizes = []
    for i in range(0, 20, 3):
        df_split = pd.read_csv(join(PROSTATE_DATA_PATH,
                                    'splits/training_set_{}.csv'.format(i)),
                               index_col=0)
        sizes.append(df_split.shape[0])
    sizes = np.array(sizes)

    df_dense_sameweights = get_dense_sameweights(c)
    df_pnet = get_pnet_preformance(col=c)
    pvalues = get_stats(df_pnet, df_dense_sameweights)
    print(c, list(zip(pvalues, sizes)))

    plot_compaison(ax, label, df_pnet, df_dense_sameweights)
    # ax.legend(ax.legend.text, loc='upper left')

    y1 = df_pnet.mean()
    y2 = df_dense_sameweights.mean()
    height = list(map(max, zip(y1, y2)))
    print('height', height)

    updated_values = []
    for i, (p, s) in enumerate(zip(pvalues, sizes)):
        if p >= 0.05:
            displaystring = r'n.s.'
        elif p < 0.0001:
            displaystring = r'***'
        elif p < 0.001:
            displaystring = r'**'
        else:
            displaystring = r'*'
        updated_values.append('{:.0f}\n({})'.format(s, displaystring))
        ax.axvline(x=s, ymin=0, linestyle='--', alpha=0.3)

    ax.set_xscale("log")
    ax.set_xticks([], [])
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.xaxis.set_minor_formatter(NullFormatter())
    ax.tick_params(axis=u'x', which=u'both', length=0)
    ax.set_xticks(sizes)
    ax.set_xticklabels(updated_values)
    ax.set_xlim((min(sizes) - 5, max(sizes) + 50))

    if plot_ratio:
        ax2 = ax.twinx()
        y1 = df_pnet.mean()
        y2 = df_dense_sameweights.mean()
        ratio = (y1.values - y2.values) / y2.values
        new_x = np.linspace(min(sizes), max(sizes), num=np.size(sizes))
        coefs = np.polyfit(sizes, ratio, 3)
        new_line = np.polyval(coefs, new_x)
        ax2.plot(new_x, new_line, '-.', linewidth=1, color=color)
        ax2.set_ylim((0.005, .23))
        ax.set_ylim((.5, 1.05))
        ax2.set_ylabel('Performance increase',
                       fontdict=dict(family='Arial', weight='bold', fontsize=14,
                                     color=color))
        vals = ax2.get_yticks()
        ax2.set_yticklabels(['{:,.0%}'.format(x) for x in vals])
        ax.set_yticks([], minor=True)
        ax2.spines['right'].set_color(color)
        ax2.yaxis.label.set_color(color)
        ax2.tick_params(axis='y', colors=color)
        ax2.spines['top'].set_visible(False)
        ax2.spines['right'].set_visible(False)
        ax2.spines['left'].set_visible(False)
        ax2.spines['bottom'].set_visible(False)

    ax.set_xlabel('Number of samples',
                  fontdict=dict(family='Arial', weight='bold', fontsize=14))

    size_vals = ax.get_xticks()
    pvalues_dict = {}
    for p, s in zip(pvalues, sizes):
        pvalues_dict[s] = p
    return pvalues_dict
def main():
    import numpy as np

    # str = "features_all_without_skeleton"
    str = "features_all_compress5"
    data = np.genfromtxt(str + ".csv", delimiter=",")
    # str = "multipliers"
    # data = np.genfromtxt("optdigits.tes", delimiter=",")
    X = data[:, :-1]
    Y = data[:, -1]
    size = X.shape[0]

    print("++++++++++++++++++++Affinity propagation+++++++++++++")
    from sklearn.cluster import AffinityPropagation

    # Compute Affinity Propagation
    af = AffinityPropagation(preference=-50).fit(X)
    cluster_centers_indices = af.cluster_centers_indices_
    labels_af = af.labels_
    n_clusters_ = len(cluster_centers_indices)
    print("number of clusters is: ", n_clusters_)
    labels_af = test_labels(labels_af, Y)
    cm = calculate_confusion(labels_af, Y)
    print("Fowlkes-Mallows index is: ", calculate_fm(cm, size))
    np.savez(str + "/af_all",
             n_cluster=n_clusters_,
             center=af.cluster_centers_,
             center_labels=labels_af[af.cluster_centers_indices_])

    # Plot result
    import matplotlib.pyplot as plt
    from matplotlib.ticker import NullFormatter
    from sklearn import manifold, datasets
    from sklearn.decomposition import PCA

    n_neighbors = 30
    n_components = 2

    fig = plt.figure(figsize=(15, 8))
    plt.suptitle("Manifold Learning with %i points, %i neighbors, t-SNE"
                 % (size, n_neighbors), fontsize=14)

    color = Y
    xx = manifold.TSNE(n_components=2, init='pca', random_state=0).fit_transform(X)
    ax = fig.add_subplot(121)
    plt.scatter(xx[:, 0], xx[:, 1], c=color, cmap=plt.cm.Spectral)
    plt.title("data with true label")
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    plt.axis('tight')

    color = labels_af
    xx = manifold.TSNE(n_components=2, init='pca', random_state=0).fit_transform(X)
    ax = fig.add_subplot(122)
    plt.scatter(xx[:, 0], xx[:, 1], c=color, cmap=plt.cm.Spectral)
    plt.title("data with clustering label")
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    plt.axis('tight')

    plt.show()
def dashboard(self, xname, yname1, yname2, zname=False, time_range=False, fit=False, unpickle=False, masks=False, plot=False, print=False, plot_anomaly=False, set_outlier=False, **kwargs): fig, axes = plt.subplots(4, 3, figsize=(20, 12), gridspec_kw={ 'width_ratios': [2, 6, 1], 'height_ratios': [1.5, 1, 1, 1] }) fig.tight_layout() nullfmt = NullFormatter() # no labels df = self.met_df.copy() if time_range is not False: df = df[time_range[0]:time_range[1]] # create masked versions of the vars x = ma.masked_invalid(df[xname]) y1, y2 = ma.masked_invalid(df[yname1]), ma.masked_invalid(df[yname2]) y = y1 / y2 if zname is False: zname = 'temp_avg_1a' z = ma.masked_invalid(df[zname]) t = ma.array(df.index) # get, plot, & print mask for all specified masks # d_masked is a dict with a count of no. of masked points mask, d_masked = self.process_masks(x, y1, y2, t, axes=axes, masks=('sensorBounds', 'flatlining'), plot=plot, print=print) # # reconstruct x, y, z, t with the returned mask # x, y, z, t = ma.array(x, mask=mask), ma.array(y, mask=mask), \ # ma.array(z, mask=mask), ma.array(t, mask=mask) ### PLOT 1 -- winddirection vs. windspeed ratio ax = axes[0, 0] ax.set_xlabel('Wind Direction (deg)') ax.set_xlim(0, 360) ax.xaxis.set_ticks(np.arange(0, 361, 45)) ax.set_ylabel('Windspeed Ratio') ax.set_ylim(0.5, 1.5) zrange = [-30, 40] # mask out small speeds for fitting purposes mask_minSpeed = self.combine_masks(self.mm.mask_minSpeed(y1), self.mm.mask_minSpeed(y2)) mask1 = self.combine_masks(mask, mask_minSpeed) # fit the data -- scikit learn ignores masks, so beware if fit in ('knn', 'k'): # check if n_neighbors, cv are in kwargs nn = kwargs['n_neighbors'] if 'n_neighbors' in kwargs else None cv = kwargs['cv'] if 'cv' in kwargs else None scoring = kwargs['scoring'] if 'scoring' in kwargs else None # do the fit x_fit, y_fit, sigma = mfit.fit_knn( ma.compressed(ma.array(x, mask=mask1)), ma.compressed(ma.array(y, mask=mask1)), axes=axes, n_neighbors=nn, cv=cv, scoring=scoring, unpickle=unpickle) elif fit in ('lowess', 'l'): x_fit, y_fit = mfit.fit_lowess(x, y) elif fit in ('savgol', 's'): x_fit, y_fit = x, mfit.fit_savgol(y) # sort the returned arrays for plotting purposes indx = x_fit.argsort() # confidence interval CI = 1.9600 * sigma[indx] # 2-sigma CI ax.fill_between(x_fit[indx], y_fit[indx] + CI, y_fit[indx] - CI, alpha=0.3, color='darkorange', edgecolor='', label='95% CI') # plot the fit ax.plot(x_fit[indx], y_fit[indx], alpha=1, color='red', label=fit) ax.legend(bbox_to_anchor=(0.2, -0.5), loc=3, borderaxespad=0.) 
# plot the mask_minSpeed data ym = ma.array(y, mask=mask_minSpeed) ax.scatter(x[ym.mask == True], y[ym.mask == True], color='black', marker='o', s=10, alpha=0.3) # plot the data ax.scatter(ma.array(x, mask=mask1), ma.array(y, mask=mask1), color='b', marker='+', s=10, alpha=0.5) # plot the color bar # zname = 'Temperature (C)' # cbar_ax = fig.add_axes([0.02, 0.05, 0.02, 0.60]) # cbar_ticks = np.arange(zrange[0], zrange[1]+10, 10) # cbar = fig.colorbar(im, cax=cbar_ax, ticks=cbar_ticks) # cbar.set_label('%s' %zname, rotation=270) #self.plot_cbar(fig, im, zrange) #fig.subplots_adjust(right=0.92) ### PLOT 2 ax = axes[0, 1] # now, plot the normalized deviation -- anomaly ax.set_ylabel('Normalized Deviation') yrange = (-5, 5) ax.set_ylim(yrange) ax.set_xlim(time_range[0], time_range[1]) ax.set_yticks(np.arange(yrange[0], yrange[1] + 0.1, 1)) # calculate the normalized deviation -- anomaly norm_dev = ma.array( ((ma.compressed(ma.array(y, mask=mask1)) - y_fit) / sigma)) #plot ax.scatter(ma.compressed(ma.array(t, mask=mask1)), norm_dev, marker='+', s=10, alpha=0.5) if 'outliers': label = 'mask_outliers' if set_outlier is True: maxPercentile = 99 maxDev = np.percentile(np.abs(norm_dev), maxPercentile) print( '\t\tThe 3-sigma (>%2i%%) norm deviation is: %5.2f. Set for production.' % (maxPercentile, maxDev)) ## pickle maxDev?? else: # we need to decide which of the three maxDev to use. # The easiest would be to pickle all three and use a different maxDev for each height # for now, let this be the palce holder maxDev = 4.28 mask4 = self.mm.mask_outliers(norm_dev, maxDev=maxDev) if print is True: d_masked['outliers'] = ma.count_masked( ma.array(norm_dev, mask=mask4)) self.print_mask_stats(d_masked, len(norm_dev)) ### PLOT 3 # plot the histogram on the side ax = axes[0, 2] ax.set_ylim(yrange) ax.set_yticks(np.arange(yrange[0], yrange[1] + 0.1, 1)) ax.set_xlim((1, 10000)) # now determine nice limits by hand: binwidth = 0.2 bins = np.arange(yrange[0], yrange[1], binwidth) # only plot +/-5 for hist ax.hist(norm_dev[np.abs(norm_dev) < 5], bins=bins, orientation='horizontal') ax.set_xscale('log') ### PLOT 4-6 -- time series self.plot_ts(t, x, z, mask, 'wdir_avg', ax=axes[1, 1], time_range=time_range) self.plot_ts(t, y1, z, mask, 'wspd_avg', ax=axes[2, 1], time_range=time_range) self.plot_ts(t, y2, z, mask, 'wspd_avg', ax=axes[3, 1], time_range=time_range) ### hide the other four panels for now axes[1, 0].axis('off') #axes[2, 0].axis('off') axes[3, 0].axis('off') axes[1, 2].axis('off') axes[2, 2].axis('off') axes[3, 2].axis('off') return mask
def main(dictAlg, order=None, outputdir='.', info='default', dimension=None, parentHtmlFileName=None, plotType=PlotType.ALG, settings=genericsettings): """Generates a figure showing the performance of algorithms. From a dictionary of :py:class:`DataSetList` sorted by algorithms, generates the cumulative distribution function of the bootstrap distribution of aRT for algorithms on multiple functions for multiple targets altogether. :param dict dictAlg: dictionary of :py:class:`DataSetList` instances one instance is equivalent to one algorithm, :param list targets: target function values :param list order: sorted list of keys to dictAlg for plotting order :param str outputdir: output directory :param str info: output file name suffix :param str parentHtmlFileName: defines the parent html page """ global x_limit # late assignment of default, because it can be set to None in config global divide_by_dimension # not fully implemented/tested yet if 'x_limit' not in globals() or x_limit is None: x_limit = x_limit_default tmp = pp.dictAlgByDim(dictAlg) algorithms_with_data = [a for a in dictAlg.keys() if dictAlg[a] != []] algorithms_with_data.sort() if len(algorithms_with_data) > 1 and len(tmp) != 1 and dimension is None: raise ValueError( 'We never integrate over dimension for more than one algorithm.') if dimension is not None: if dimension not in tmp.keys(): raise ValueError('dimension %d not in dictAlg dimensions %s' % (dimension, str(tmp.keys()))) tmp = {dimension: tmp[dimension]} dimList = list(tmp.keys()) # The sort order will be defined inside this function. if plotType == PlotType.DIM: order = [] # Collect data # Crafting effort correction: should we consider any? CrEperAlg = {} for alg in algorithms_with_data: CrE = 0. if 1 < 3 and dictAlg[alg][0].algId == 'GLOBAL': tmp = dictAlg[alg].dictByNoise() assert len(tmp.keys()) == 1 if list(tmp.keys())[0] == 'noiselessall': CrE = 0.5117 elif list(tmp.keys())[0] == 'nzall': CrE = 0.6572 if plotType == PlotType.DIM: for dim in dimList: keyValue = '%d-D' % (dim) CrEperAlg[keyValue] = CrE elif plotType == PlotType.FUNC: tmp = pp.dictAlgByFun(dictAlg) for f, dictAlgperFunc in tmp.items(): keyValue = 'f%d' % (f) CrEperAlg[keyValue] = CrE else: CrEperAlg[alg] = CrE if CrE != 0.0: print('Crafting effort for', alg, 'is', CrE) dictData = {} # list of (ert per function) per algorithm dictMaxEvals = {} # list of (maxevals per function) per algorithm # funcsolved = [set()] * len(targets) # number of functions solved per target xbest = [] maxevalsbest = [] target_values = testbedsettings.current_testbed.pprldmany_target_values dictDimList = pp.dictAlgByDim(dictAlg) dims = sorted(dictDimList) for i, dim in enumerate(dims): divisor = dim if divide_by_dimension else 1 dictDim = dictDimList[dim] dictFunc = pp.dictAlgByFun(dictDim) for f, dictAlgperFunc in sorted(dictFunc.items()): # print(target_values((f, dim))) for j, t in enumerate(target_values((f, dim))): # for j, t in enumerate(testbedsettings.current_testbed.ecdf_target_values(1e2, f)): # funcsolved[j].add(f) for alg in sorted(algorithms_with_data): x = [np.inf] * perfprofsamplesize runlengthunsucc = [] try: entry = dictAlgperFunc[alg][ 0] # one element per fun and per dim. 
evals = entry.detEvals([t])[0] assert entry.dim == dim runlengthsucc = evals[np.isnan(evals) == False] / divisor runlengthunsucc = entry.maxevals[np.isnan( evals)] / divisor if len(runlengthsucc) > 0: x = toolsstats.drawSP( runlengthsucc, runlengthunsucc, percentiles=[50], samplesize=perfprofsamplesize)[1] except (KeyError, IndexError): # set_trace() warntxt = ( 'Data for algorithm %s on function %d in %d-D ' % (alg, f, dim) + 'are missing.\n') warnings.warn(warntxt) keyValue = alg if plotType == PlotType.DIM: keyValue = '%d-D' % (dim) if keyValue not in order: order.append(keyValue) elif plotType == PlotType.FUNC: keyValue = 'f%d' % (f) dictData.setdefault(keyValue, []).extend(x) dictMaxEvals.setdefault(keyValue, []).extend(runlengthunsucc) displaybest = plotType == PlotType.ALG if displaybest: # set_trace() refalgentries = bestalg.load_reference_algorithm( testbedsettings.current_testbed. reference_algorithm_filename) if not refalgentries: displaybest = False else: refalgentry = refalgentries[(dim, f)] refalgevals = refalgentry.detEvals(target_values((f, dim))) # print(refalgevals) for j in range(len(refalgevals[0])): if refalgevals[1][j]: evals = refalgevals[0][j] # set_trace() assert dim == refalgentry.dim runlengthsucc = evals[np.isnan(evals) == False] / divisor runlengthunsucc = refalgentry.maxevals[ refalgevals[1][j]][np.isnan(evals)] / divisor x = toolsstats.drawSP( runlengthsucc, runlengthunsucc, percentiles=[50], samplesize=perfprofsamplesize)[1] else: x = perfprofsamplesize * [np.inf] runlengthunsucc = [] xbest.extend(x) maxevalsbest.extend(runlengthunsucc) if order is None: order = dictData.keys() # Display data lines = [] if displaybest: args = { 'ls': '-', 'linewidth': 6, 'marker': 'D', 'markersize': 11., 'markeredgewidth': 1.5, 'markerfacecolor': refcolor, 'markeredgecolor': refcolor, 'color': refcolor, 'label': testbedsettings.current_testbed.reference_algorithm_displayname, 'zorder': -1 } lines.append( plotdata(np.array(xbest), x_limit, maxevalsbest, CrE=0., **args)) def algname_to_label(algname, dirname=None): """to be extended to become generally useful""" if isinstance(algname, (tuple, list)): # not sure this is needed return ' '.join([str(name) for name in algname]) return str(algname) plotting_style_list = ppfig.get_plotting_styles(order) for plotting_style in plotting_style_list: for i, alg in enumerate(plotting_style.algorithm_list): try: data = dictData[alg] maxevals = dictMaxEvals[alg] except KeyError: continue args = styles[i % len(styles)] args = args.copy() args['linewidth'] = 1.5 args['markersize'] = 12. args['markeredgewidth'] = 1.5 args['markerfacecolor'] = 'None' args['markeredgecolor'] = args['color'] args['label'] = algname_to_label(alg) if plotType == PlotType.DIM: args['marker'] = genericsettings.dim_related_markers[i] args['markeredgecolor'] = genericsettings.dim_related_colors[i] args['color'] = genericsettings.dim_related_colors[i] # args['markevery'] = perfprofsamplesize # option available in latest version of matplotlib # elif len(show_algorithms) > 0: # args['color'] = 'wheat' # args['ls'] = '-' # args['zorder'] = -1 # plotdata calls pprldistr.plotECDF which calls ppfig.plotUnifLog... 
which does the work args.update(plotting_style.pprldmany_styles) lines.append( plotdata(np.array(data), x_limit, maxevals, CrE=CrEperAlg[alg], **args)) labels, handles = plotLegend(lines, x_limit) if True: # isLateXLeg: if info: file_name = os.path.join( outputdir, '%s_%s.tex' % (genericsettings.pprldmany_file_name, info)) else: file_name = os.path.join( outputdir, '%s.tex' % genericsettings.pprldmany_file_name) with open(file_name, 'w') as file_obj: file_obj.write(r'\providecommand{\nperfprof}{7}') algtocommand = {} # latex commands for i, alg in enumerate(order): tmp = r'\alg%sperfprof' % pptex.numtotext(i) file_obj.write( r'\providecommand{%s}{\StrLeft{%s}{\nperfprof}}' % (tmp, toolsdivers.str_to_latex( toolsdivers.strip_pathname2(algname_to_label(alg))))) algtocommand[algname_to_label(alg)] = tmp if displaybest: tmp = r'\algzeroperfprof' refalgname = testbedsettings.current_testbed.reference_algorithm_displayname file_obj.write(r'\providecommand{%s}{%s}' % (tmp, refalgname)) algtocommand[algname_to_label(refalgname)] = tmp commandnames = [] for label in labels: commandnames.append(algtocommand[label]) # file_obj.write(headleg) if len( order ) > 28: # latex sidepanel won't work well for more than 25 algorithms, but original labels are also clipped file_obj.write( r'\providecommand{\perfprofsidepanel}{\mbox{%s}\vfill\mbox{%s}}' % (commandnames[0], commandnames[-1])) else: fontsize_command = r'\tiny{}' if len(order) > 19 else '' file_obj.write( r'\providecommand{\perfprofsidepanel}{{%s\mbox{%s}' % (fontsize_command, commandnames[0])) # TODO: check len(labels) > 0 for i in range(1, len(labels)): file_obj.write('\n' + r'\vfill \mbox{%s}' % commandnames[i]) file_obj.write('}}\n') # file_obj.write(footleg) if genericsettings.verbose: print('Wrote right-hand legend in %s' % file_name) if info: figureName = os.path.join( outputdir, '%s_%s' % (genericsettings.pprldmany_file_name, info)) else: figureName = os.path.join(outputdir, '%s' % genericsettings.pprldmany_file_name) # beautify(figureName, funcsolved, x_limit*x_annote_factor, False, fileFormat=figformat) beautify() if plotType == PlotType.FUNC: dictFG = pp.dictAlgByFuncGroup(dictAlg) dictKey = list(dictFG.keys())[0] functionGroups = dictAlg[list(dictAlg.keys())[0]].getFuncGroups() text = '%s\n%s, %d-D' % (testbedsettings.current_testbed.name, functionGroups[dictKey], dimList[0]) else: text = '%s %s' % (testbedsettings.current_testbed.name, ppfig.consecutiveNumbers(sorted(dictFunc.keys()), 'f')) if not (plotType == PlotType.DIM): text += ', %d-D' % dimList[0] # add information about smallest and largest target and their number text += '\n' targetstrings = target_values.labels() if isinstance(target_values, pp.RunlengthBasedTargetValues): text += (str(len(targetstrings)) + ' targets RLs/dim: ' + targetstrings[0] + '..' + targetstrings[len(targetstrings) - 1] + '\n') text += ' from ' + testbedsettings.current_testbed.reference_algorithm_filename else: text += (str(len(targetstrings)) + ' targets: ' + targetstrings[0] + '..' 
+ targetstrings[len(targetstrings) - 1]) # add number of instances text += '\n' num_of_instances = [] for alg in algorithms_with_data: if ((alg in genericsettings.foreground_algorithm_list or alg[0] in genericsettings.foreground_algorithm_list[0] ) # case of a single algorithm only and len(dictAlgperFunc[alg]) > 0): num_of_instances.append( len((dictAlgperFunc[alg])[0].instancenumbers)) else: warnings.warn( 'The data for algorithm %s and function %s are missing' % (alg, f)) # issue a warning if number of instances is inconsistant, but always # display only the present number of instances, i.e. remove copies if len(set(num_of_instances)) > 1: warnings.warn( 'Number of instances inconsistent over all algorithms: %s instances found.' % str(num_of_instances)) num_of_instances = set(num_of_instances) for n in num_of_instances: text += '%d, ' % n text = text.rstrip(', ') text += ' instances' plt.text(0.01, 0.99, text, horizontalalignment="left", verticalalignment="top", transform=plt.gca().transAxes, fontsize=0.6 * label_fontsize) if len(dictFunc) == 1: plt.title(' '.join((str(list(dictFunc.keys())[0]), testbedsettings.current_testbed.short_names[list( dictFunc.keys())[0]])), fontsize=title_fontsize) a = plt.gca() plt.xlim(1e-0, x_limit) xmaxexp = int(np.floor(np.log10(x_limit))) xmajorticks = [10**exponent for exponent in range(0, xmaxexp + 1, 2)] xminorticks = [10**exponent for exponent in range(0, xmaxexp + 1)] def formatlabel(val, pos): labeltext = '{:d}'.format(int(round(np.log10(val)))) return labeltext a.xaxis.set_major_locator(FixedLocator(xmajorticks)) a.xaxis.set_major_formatter(FuncFormatter(formatlabel)) a.xaxis.set_minor_locator(FixedLocator(xminorticks)) a.xaxis.set_minor_formatter(NullFormatter()) if save_figure: ppfig.save_figure( figureName, dictAlg[algorithms_with_data[0]][0].algId, layout_rect=(0, 0, 0.735, 1), # Prevent clipping in matplotlib >=3: # Relative additional space numbers are # bottom, left, 1 - top, and 1 - right. # bottom=0.13 still clips g in the log(#evals) xlabel subplots_adjust=dict(bottom=0.135, right=0.735), ) if plotType == PlotType.DIM: file_name = genericsettings.pprldmany_file_name ppfig.save_single_functions_html( os.path.join(outputdir, file_name), '', # algorithms names are clearly visible in the figure htmlPage=ppfig.HtmlPage.NON_SPECIFIED, parentFileName='../%s' % parentHtmlFileName if parentHtmlFileName else None, header=ppfig.pprldmany_per_func_dim_header) if close_figure: plt.close()
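# The only axis-specific trick at the end of the snippet above is the x-tick
# treatment: FixedLocator ticks at every decade, labels showing log10 of the
# tick value (major ticks every two decades), and a NullFormatter on the minor
# ticks. A minimal self-contained sketch of just that treatment follows; the
# data and the x_limit value are made up for illustration.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import FixedLocator, FuncFormatter, NullFormatter

x_limit = 1e6  # hypothetical evaluation budget
rng = np.random.default_rng(0)
data = 10 ** (6 * rng.random(200))  # fake run lengths

fig, ax = plt.subplots()
ax.plot(np.sort(data), np.linspace(0, 1, data.size), drawstyle='steps-post')
ax.set_xscale('log')
ax.set_xlim(1e0, x_limit)

xmaxexp = int(np.floor(np.log10(x_limit)))
xmajorticks = [10 ** exponent for exponent in range(0, xmaxexp + 1, 2)]
xminorticks = [10 ** exponent for exponent in range(0, xmaxexp + 1)]


def formatlabel(val, pos):
    # tick label shows log10 of the tick value, as in the figure above
    return '{:d}'.format(int(round(np.log10(val))))


ax.xaxis.set_major_locator(FixedLocator(xmajorticks))
ax.xaxis.set_major_formatter(FuncFormatter(formatlabel))
ax.xaxis.set_minor_locator(FixedLocator(xminorticks))
ax.xaxis.set_minor_formatter(NullFormatter())
plt.show()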
fig = plt.figure(figsize=(6.5875, 6.2125))
ax = fig.add_subplot(111, projection='skewx')

plt.grid(True)

# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
ax.semilogy(T, p, color='C3')
ax.semilogy(Td, p, color='C2')

# An example of a slanted line at constant X
l = ax.axvline(0, color='C0')

# Disables the log-formatting that comes with semilogy
ax.yaxis.set_major_formatter(ScalarFormatter())
ax.yaxis.set_minor_formatter(NullFormatter())
ax.set_yticks(np.linspace(100, 1000, 10))
ax.set_ylim(1050, 100)

ax.xaxis.set_major_locator(MultipleLocator(10))
ax.set_xlim(-50, 50)

plt.show()
def plotacf(args, archive, pulsar, mjd, minFreq, maxFreq, length, extrapolx_f,
            extrapoly_f, fitxplot_f, extrapolx_t, extrapoly_t, fitxplot_t,
            midACF_freq, midACF_time, acf_mid, opt_f, opt_t, acffit_2D, f_end,
            t_end, mhzperbin, minperbin, Drift_rate, slope_visibility):
    nullfmt = NullFormatter()  # no labels

    # definitions for the axes
    left_edge = 0.15
    bottom_edge = 0.13
    width = 0.5
    height = 0.75
    space = 0.005
    rect_acf = [left_edge, bottom_edge, width, height - 0.21]
    rect_time = [left_edge, height - 0.07, width, 0.26]
    rect_freq = [left_edge + width + space + 0.01, bottom_edge, 0.26,
                 height - 0.21]

    plt.figure(1, figsize=(7, 6))
    axacf = plt.axes(rect_acf)
    axtime = plt.axes(rect_time)
    axfreq = plt.axes(rect_freq)

    axacf.set_ylabel("frequency (MHz)")
    axacf.imshow(acf_mid,
                 aspect='auto',
                 extent=[-(length / 60) / 2, (length / 60) / 2,
                         -(maxFreq - minFreq) / 2, (maxFreq - minFreq) / 2])
    axacf.contour(acf_mid,
                  aspect='auto',
                  extent=[-(length / 60) / 2, (length / 60) / 2,
                          (maxFreq - minFreq) / 2, -(maxFreq - minFreq) / 2])
    axacf.contour(acffit_2D,
                  aspect='auto',
                  extent=[-(length / 60) / 2, (length / 60) / 2,
                          (maxFreq - minFreq) / 2, -(maxFreq - minFreq) / 2])
    axacf.set_xlabel("time (mins) \n Drift_rate=%.6f slope_visibility=%.6f" %
                     (Drift_rate, slope_visibility))

    t_y = np.linspace(np.min(midACF_time) - 0.1, 1.1 * np.max(midACF_time), 20)
    t_x = []
    for i in range(20):
        t_x.append(np.sqrt(1 / opt_t))

    axtime.set_title("%s MJD %s " % (pulsar, mjd), fontsize=12)
    axtime.xaxis.set_major_formatter(nullfmt)
    axtime.scatter(fitxplot_t, midACF_time)
    axtime.set_xlim(-(length / 60) / 2, (length / 60) / 2)
    timelen = len(midACF_time)
    axtime.set_ylim(
        np.min(midACF_time[int(0.25 * timelen):int(0.75 * timelen)]) - 0.1,
        np.max(midACF_time[int(0.25 * timelen):int(0.75 * timelen)]) + 0.1)
    axtime.plot(t_x, t_y, color='red', linewidth=4.0)
    axtime.plot(extrapolx_t, extrapoly_t, linewidth=4.0)
    axtime.locator_params(axis='y', tight=True, nbins=5)

    f_x = np.linspace(np.min(midACF_freq) - 0.1, 1.2 * np.max(midACF_freq), 20)
    f_y = []
    for i in range(20):
        f_y.append(-np.sqrt(np.log(2) / opt_f))

    axfreq.yaxis.set_major_formatter(nullfmt)
    axfreq.scatter(midACF_freq, fitxplot_f)
    freqlen = len(midACF_freq)
    axfreq.set_xlim(
        np.min(midACF_freq[int(0.25 * freqlen):int(0.75 * freqlen)]) - 0.1,
        1.2 * np.max(midACF_freq[int(0.25 * freqlen):int(0.75 * freqlen)]))
    axfreq.set_ylim(-(maxFreq - minFreq) / 2, (maxFreq - minFreq) / 2)
    #axfreq.set_xlabel("freq: \n%.4f(%.4f) Mhz" % (freq1D, freq_error))
    #axfreq.set_title("observation time: %.4f \n time: %.4f +- %.4f Min \n \n \n" % (length/60, time1D, time_error),fontsize=12)
    axfreq.plot(f_x, f_y, color='red', linewidth=4.0)
    axfreq.plot(extrapoly_f, extrapolx_f, linewidth=4.0)
    axfreq.locator_params(axis='x', tight=True, nbins=5)

    filename = archive.rsplit('.')[0]
    if args.savefigure:
        plt.savefig('%s_all.eps' % filename)
        plt.clf()
    else:
        plt.show()
def scatter_im(
    X,
    imfunc,
    zoom=1,
    dims_to_plot=[0, 1],
    ax=None,
    inset=False,
    inset_offset=0.15,
    inset_width_and_height=0.1,
    plot_range=[0.05, 99.95],
    inset_colors=None,
    inset_scatter_size=25,
    inset_title=None,
    inset_clims=None,
):
    # Adapted from https://stackoverflow.com/questions/22566284/matplotlib-how-to-plot-images-instead-of-points/53851017
    if ax is None:
        ax = plt.gca()

    artists = []
    for i in range(X.shape[0]):
        im = OffsetImage(imfunc(i), zoom=zoom)
        ab = AnnotationBbox(
            im,
            (X[i, dims_to_plot[0]], X[i, dims_to_plot[1]]),
            xycoords="data",
            frameon=False,
        )
        artists.append(ax.add_artist(ab))

    ax.update_datalim(X[:, dims_to_plot])
    ax.autoscale()
    ax.xaxis.set_ticks_position("none")
    ax.yaxis.set_ticks_position("none")

    x_lb, x_hb = np.percentile(X[:, dims_to_plot[0]], plot_range)
    y_lb, y_hb = np.percentile(X[:, dims_to_plot[1]], plot_range)
    x_pad = (x_hb - x_lb) * 0.1
    y_pad = (y_hb - y_lb) * 0.1
    ax.set_xlim(x_lb - x_pad, x_hb + x_pad)
    ax.set_ylim(y_lb - y_pad, y_hb + y_pad)
    ax.set_facecolor((0, 0, 0))

    nullfmt = NullFormatter()
    ax.xaxis.set_major_formatter(nullfmt)
    ax.yaxis.set_major_formatter(nullfmt)
    ax.axis("off")

    ax_inset = None
    if inset:
        offset = 0.15
        inset = [
            offset,
            1 - inset_offset - inset_width_and_height,
            inset_width_and_height,
            inset_width_and_height,
        ]
        ax_inset = plt.axes(inset)
        if inset_clims is None and (not isinstance(inset_colors, str)):
            inset_clims = np.percentile(inset_colors, [0, 100])
        ax_inset.scatter(
            X[:, dims_to_plot[0]],
            X[:, dims_to_plot[1]],
            s=inset_scatter_size,
            c=inset_colors,
            vmin=inset_clims[0],
            vmax=inset_clims[1],
        )
        for k in ax_inset.spines:
            ax_inset.spines[k].set_color("w")
        # ax_inset.set_facecolor('w')
        ax_inset.xaxis.label.set_color("w")
        ax_inset.yaxis.label.set_color("w")
        ax_inset.tick_params(axis="x", colors="w")
        ax_inset.tick_params(axis="y", colors="w")
        if inset_title is not None:
            ax_inset.set_title(inset_title)
        return ax, ax_inset

    plt.sca(ax)
    return ax
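# A short usage sketch for scatter_im above. The embedding X and the imfunc
# returning 8x8 random "thumbnails" are placeholders, and the module is assumed
# to already import matplotlib.pyplot, numpy, OffsetImage and AnnotationBbox as
# the function requires.
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
X = rng.normal(size=(20, 2))        # fake 2-D embedding
patches = rng.random((20, 8, 8))    # fake grayscale thumbnails

fig, ax = plt.subplots(figsize=(6, 6))
scatter_im(
    X,
    imfunc=lambda i: patches[i],    # image shown at point i
    zoom=3,
    ax=ax,
    inset=True,
    inset_colors=X[:, 0],           # color the inset scatter by the first dim
    inset_title='overview',
)
plt.show()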
def plotall(args, archive, pulsar, mjd, intensity, minFreq, maxFreq, length,
            extrapolx_f, extrapoly_f, fitxplot_f, extrapolx_t, extrapoly_t,
            fitxplot_t, freq1D, time1D, freq_error, time_error, midACF_freq,
            midACF_time, acf_mid, opt_f, opt_t, secondary, acffit_2D, f_end,
            t_end, mhzperbin, minperbin, Drift_rate, slope_visibility,
            sec_axes):
    nullfmt = NullFormatter()  # no labels

    # definitions for the axes
    left_edge = 0.06
    bottom_edge = 0.13
    width = 0.24
    height = 0.75
    space = 0.005
    rect_dysp = [left_edge, bottom_edge, width, height - 0.21]
    rect_acf = [left_edge + width + space, bottom_edge, width, height - 0.21]
    rect_time = [left_edge + width + space, height - 0.07, width, 0.26]
    rect_freq = [left_edge + 2 * width + space, bottom_edge, 0.12,
                 height - 0.21]
    rect_sec = [left_edge + 2 * width + space + space + 0.18, bottom_edge,
                0.25, height - 0.15]

    plt.figure(1, figsize=(14, 6))
    axdysp = plt.axes(rect_dysp)
    axacf = plt.axes(rect_acf)
    axtime = plt.axes(rect_time)
    axfreq = plt.axes(rect_freq)
    axsec = plt.axes(rect_sec)

    axdysp.imshow(intensity,
                  aspect='auto',
                  extent=[0, length / 60, minFreq, maxFreq],
                  vmax=1,
                  vmin=-0.1,
                  cmap='jet',
                  interpolation='None')
    axdysp.set_xlabel("time (mins)")
    axdysp.set_ylabel("frequency (MHz)")
    axdysp.set_title(
        "%s \n MJD %s \nTime:%.4f +- %.4f mins \n freq: %.4f +- %.4f MHz \n observation time: %.4f mins\n \n"
        % (pulsar, mjd, time1D, time_error, freq1D, freq_error, length / 60),
        fontsize=12)

    axacf.yaxis.set_major_formatter(nullfmt)
    axacf.imshow(acf_mid,
                 aspect='auto',
                 extent=[-(length / 60) / 2, (length / 60) / 2,
                         -(maxFreq - minFreq) / 2, (maxFreq - minFreq) / 2])
    axacf.contour(acf_mid,
                  aspect='auto',
                  extent=[-(length / 60) / 2, (length / 60) / 2,
                          (maxFreq - minFreq) / 2, -(maxFreq - minFreq) / 2])
    axacf.contour(acffit_2D,
                  aspect='auto',
                  extent=[-(length / 60) / 2, (length / 60) / 2,
                          (maxFreq - minFreq) / 2, -(maxFreq - minFreq) / 2])
    axacf.set_xlabel("Drift_rate=%.6f \n slope_visibility=%.6f" %
                     (Drift_rate, slope_visibility))

    t_y = np.linspace(np.min(midACF_time) - 0.1, 1.1 * np.max(midACF_time), 20)
    t_x = []
    for i in range(20):
        t_x.append(np.sqrt(1 / opt_t))

    axtime.xaxis.set_major_formatter(nullfmt)
    axtime.scatter(fitxplot_t, midACF_time)
    axtime.set_xlim(-(length / 60) / 2, (length / 60) / 2)
    timelen = len(midACF_time)
    axtime.set_ylim(
        np.min(midACF_time[int(0.25 * timelen):int(0.75 * timelen)]) - 0.1,
        np.max(midACF_time[int(0.25 * timelen):int(0.75 * timelen)]) + 0.1)
    axtime.plot(t_x, t_y, color='red', linewidth=4.0)
    axtime.plot(extrapolx_t, extrapoly_t, linewidth=4.0)
    axtime.locator_params(axis='y', tight=True, nbins=5)

    f_x = np.linspace(np.min(midACF_freq) - 0.1, 1.2 * np.max(midACF_freq), 20)
    f_y = []
    for i in range(20):
        f_y.append(-np.sqrt(np.log(2) / opt_f))

    axfreq.yaxis.set_major_formatter(nullfmt)
    axfreq.scatter(midACF_freq, fitxplot_f)
    freqlen = len(midACF_freq)
    axfreq.set_xlim(
        np.min(midACF_freq[int(0.25 * freqlen):int(0.75 * freqlen)]) - 0.1,
        1.2 * np.max(midACF_freq[int(0.25 * freqlen):int(0.75 * freqlen)]))
    axfreq.set_ylim(-(maxFreq - minFreq) / 2, (maxFreq - minFreq) / 2)
    #axfreq.set_xlabel("freq: \n%.4f(%.4f) Mhz" % (freq1D, freq_error))
    #axfreq.set_title("observation time: %.4f \n time: %.4f +- %.4f Min \n \n \n" % (length/60, time1D, time_error),fontsize=12)
    axfreq.plot(f_x, f_y, color='red', linewidth=4.0)
    axfreq.plot(extrapoly_f, extrapolx_f, linewidth=4.0)
    axfreq.locator_params(axis='x', tight=True, nbins=5)

    sec_mean = np.mean(secondary)
    axsec.imshow(secondary,
                 aspect='auto',
                 origin='lower',
                 cmap='binary',
                 extent=[sec_axes[2], sec_axes[3], sec_axes[0], sec_axes[1]],
                 interpolation='None',
                 vmax=sec_mean + 8,
                 vmin=sec_mean + 2)
    axsec.set_xlabel(r'Fringe Frequency ($10^{-3}$Hz)')
    axsec.set_ylabel(r'Delay ($\mu$s)')
    axsec.set_title("Secondary spectra")

    filename = archive.rsplit('.')[0]
    if args.savefigure:
        plt.savefig('%s_all.eps' % filename)
        plt.clf()
    else:
        plt.show()
from yt.mods import *
import yt.visualization.eps_writer as EPS
from matplotlib import rc
import matplotlib.pyplot as plt
from matplotlib.font_manager import fontManager, FontProperties

font = FontProperties(size='small')
rc('font', family='serif', serif='cmr10', size=18)
from matplotlib.ticker import NullFormatter
nullfmt = NullFormatter()

########################################################################
# Derived fields
########################################################################
# 'na' (numpy) and 'add_field' below come from the star import of yt.mods.


def _MyRadius(field, data):
    center = data.get_field_parameter("center")
    dx = data["x"] - center[0]
    dy = data["y"] - center[1]
    dz = data["z"] - center[2]
    return na.sqrt(dx * dx + dy * dy + dz * dz)


add_field("Radius", function=_MyRadius, take_log=False, units='')


def _MyIonizedFrac(field, data):
    return data['HII_Fraction'] / 0.75908798
#ax2.set_xscale('log')
ax2.set_xlim(left=0.8, right=4.2)
ax2.set_xticks([1, 2, 3, 4])
ax2.xaxis.set_major_formatter(ScalarFormatter())
ax2.yaxis.set_major_formatter(ScalarFormatter())
ax2.ticklabel_format(axis='both', style='plain')

ax2_sec = ax2.twiny()
#ax2_sec.set_xscale('log')
ax2_sec.set_xlim(ax2.get_xlim())
mass_values = np.array([13.0, 14.0, 14.5, 15.0, 15.5])
new_tick_locations = peak_mass(mass_values)
print(mass_values)
print(new_tick_locations)
ax2_sec.xaxis.set_major_formatter(NullFormatter())
ax2_sec.xaxis.set_minor_formatter(NullFormatter())
ax2_sec.tick_params(axis='x', which='minor', top=False)
ax2_sec.set_xticks(new_tick_locations)
ax2_sec.set_xticklabels(mass_values)
#xmin, xmax = ax2.get_xlim()
#print(xmin, xmax)
#print(Mass_peak(xmin), Mass_peak(xmax))
#ax2_sec.set_xlim(Mass_peak(xmin), Mass_peak(xmax))
#ax2_sec.set_xscale('log')
#ax2_sec.set_xticks(new_tick_locations)

ax2.set_ylabel(r'$\lambda$', fontsize=12)
ax2.set_xlabel(r'$\nu$ = $\delta_{\rm c}$/$\sigma$', fontsize=12)
ax2_sec.set_xlabel(r'$\log_{10}$M [M$_{\odot}$/h]', fontsize=12)
ax2_sec.tick_params(labelsize=12)
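# The pattern above -- a twin x-axis whose ticks sit at positions obtained by
# converting physically meaningful values (here halo masses) into the primary
# axis coordinate, with NullFormatter suppressing the inherited labels -- can be
# isolated into a self-contained sketch. mass_to_nu below is an invented
# monotonic mapping standing in for peak_mass.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter


def mass_to_nu(log_mass):
    # placeholder conversion; any monotonic mass -> nu relation would do
    return 0.8 * (log_mass - 12.0)


fig, ax = plt.subplots()
nu = np.linspace(0.8, 4.2, 100)
ax.plot(nu, nu ** -0.5)
ax.set_xlim(0.8, 4.2)
ax.set_xlabel(r'$\nu$')

ax_sec = ax.twiny()
ax_sec.set_xlim(ax.get_xlim())
mass_values = np.array([13.0, 14.0, 14.5, 15.0, 15.5])
ax_sec.xaxis.set_major_formatter(NullFormatter())
ax_sec.xaxis.set_minor_formatter(NullFormatter())
ax_sec.set_xticks(mass_to_nu(mass_values))
ax_sec.set_xticklabels(mass_values)
ax_sec.set_xlabel(r'$\log_{10}$M [M$_{\odot}$/h]')
plt.show()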
# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']

for i, method in enumerate(methods):
    t0 = time()
    trans_data = manifold\
        .LocallyLinearEmbedding(n_neighbors, 2,
                                method=method).fit_transform(sphere_data).T
    t1 = time()
    print("%s: %.2g sec" % (methods[i], t1 - t0))

    ax = fig.add_subplot(252 + i)
    plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
    plt.title("%s (%.2g sec)" % (labels[i], t1 - t0))
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    plt.axis('tight')

# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
    .fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % ('ISO', t1 - t0))

ax = fig.add_subplot(257)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
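# The snippet above depends on objects created earlier in its script (fig,
# sphere_data, colors, n_neighbors). A compact, self-contained variant on a
# synthetic S-curve, using the keyword-argument constructors required by recent
# scikit-learn releases, might look like this.
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
from sklearn import datasets, manifold

X, color = datasets.make_s_curve(n_samples=500, random_state=0)

embeddings = {
    'LLE': manifold.LocallyLinearEmbedding(n_neighbors=12, n_components=2),
    'Isomap': manifold.Isomap(n_neighbors=12, n_components=2),
}

fig, axes = plt.subplots(1, len(embeddings), figsize=(8, 4))
for ax, (name, estimator) in zip(axes, embeddings.items()):
    Y = estimator.fit_transform(X)
    ax.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.rainbow, s=10)
    ax.set_title(name)
    # hide tick labels, as in the example above
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
plt.show()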
def cla(self):
    GeoAxes.cla(self)
    self.yaxis.set_major_formatter(NullFormatter())
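# The cla() override above re-installs a NullFormatter every time the GeoAxes is
# cleared, so the y tick labels stay hidden. Below is a minimal sketch of the
# same pattern with plain Matplotlib (no cartopy/GeoAxes); the projection name
# 'no_y_labels' is invented for the example.
import matplotlib.pyplot as plt
from matplotlib.axes import Axes
from matplotlib.projections import register_projection
from matplotlib.ticker import NullFormatter


class NoYLabelAxes(Axes):
    name = 'no_y_labels'

    def cla(self):
        Axes.cla(self)
        self.yaxis.set_major_formatter(NullFormatter())


register_projection(NoYLabelAxes)

fig, ax = plt.subplots(subplot_kw={'projection': 'no_y_labels'})
ax.plot([0, 1, 2], [0, 1, 4])
plt.show()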
def dEdxCorrelation(data_set_list, x_bins, y_bins, x_label, y_label):
    # Set up the plots:
    # definitions for the axes
    left, width = 0.1, 0.65
    bottom, height = 0.1, 0.65
    bottom_h = left_h = left + width + 0.02

    rect_scatter = [left, bottom, width, height]
    rect_histx = [left, bottom_h, width, 0.2]
    rect_histy = [left_h, bottom, 0.2, height]

    # start with a rectangular Figure
    plt.figure(1, figsize=(8, 8))

    axScatter = plt.axes(rect_scatter)
    # plt.xlabel("dE/dx [MeV/cm], {l}".format(l=branch_name_x))
    plt.xlabel(x_label)
    # plt.ylabel("dE/dx [MeV/cm], {l}".format(l=branch_name_y))
    plt.ylabel(y_label)

    axHistx = plt.axes(rect_histx)
    axHisty = plt.axes(rect_histy)

    nullfmt = NullFormatter()  # no labels

    # no labels
    axHistx.xaxis.set_major_formatter(nullfmt)
    axHistx.yaxis.set_major_formatter(nullfmt)
    axHisty.xaxis.set_major_formatter(nullfmt)
    axHisty.yaxis.set_major_formatter(nullfmt)

    axHistx.set_ylim([0, 1.0])
    axHisty.set_xlim([0, 1.0])

    # Unpack the tuples and draw them:
    for _set in data_set_list:
        hist_data_x, x_bin_edges = numpy.histogram(_set.values_x, x_bins,
                                                   density=True)
        hist_data_y, y_bin_edges = numpy.histogram(_set.values_y, y_bins,
                                                   density=True)

        max_val = numpy.max((numpy.max(hist_data_x), numpy.max(hist_data_y)))
        hist_data_x *= 0.75 / max_val
        hist_data_y *= 0.75 / max_val

        x_centers = []
        for i in range(len(x_bin_edges) - 1):
            x_centers.append(0.5 * (x_bin_edges[i] + x_bin_edges[i + 1]))
        y_centers = []
        for i in range(len(y_bin_edges) - 1):
            y_centers.append(0.5 * (y_bin_edges[i] + y_bin_edges[i + 1]))

        # the scatter plot:
        axScatter.errorbar(_set.values_x,
                           _set.values_y,
                           xerr=_set.x_errs,
                           yerr=_set.y_errs,
                           ls="",
                           marker=_set.marker,
                           label=_set.label,
                           c=_set.color)

        axHistx.errorbar(x_centers,
                         hist_data_x,
                         # xerr=binwidth*0.5,
                         # yerr=electron_err_norm_x,
                         label=_set.label,
                         capsize=0,
                         marker=_set.marker,
                         color=_set.color)
        axHisty.errorbar(hist_data_y,
                         y_centers,
                         # xerr=binwidth*0.5,
                         # yerr=electron_err_norm_x,
                         label=_set.label,
                         capsize=0,
                         marker=_set.marker,
                         color=_set.color)

    axScatter.legend()

    #
    # Set limits for dE/dx values
    axScatter.set_xlim((x_bins[0], x_bins[-1]))
    axScatter.set_ylim((y_bins[0], y_bins[-1]))
    # axScatter.grid(True)

    axHistx.set_xlim(axScatter.get_xlim())
    axHisty.set_ylim(axScatter.get_ylim())
    axHistx.grid(True)
    axHisty.grid(True)
    axScatter.grid(True)

    # plt.legend()
    plt.show()
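# dEdxCorrelation expects each element of data_set_list to expose values_x,
# values_y, x_errs, y_errs, marker, label and color. A usage sketch with a
# hypothetical namedtuple and random data (not part of the original analysis),
# assuming the module's own imports (plt, numpy, NullFormatter) are in place:
from collections import namedtuple

import numpy

DataSet = namedtuple('DataSet', ['values_x', 'values_y', 'x_errs', 'y_errs',
                                 'marker', 'label', 'color'])

rng = numpy.random.RandomState(0)
electrons = DataSet(values_x=rng.normal(2.0, 0.4, 200),
                    values_y=rng.normal(2.1, 0.5, 200),
                    x_errs=0.1 * numpy.ones(200),
                    y_errs=0.1 * numpy.ones(200),
                    marker='o', label='electrons', color='b')
photons = DataSet(values_x=rng.normal(4.0, 0.6, 200),
                  values_y=rng.normal(4.2, 0.7, 200),
                  x_errs=0.1 * numpy.ones(200),
                  y_errs=0.1 * numpy.ones(200),
                  marker='s', label='photons', color='r')

bins = numpy.linspace(0, 8, 50)
dEdxCorrelation([electrons, photons], bins, bins,
                'dE/dx [MeV/cm], plane 0', 'dE/dx [MeV/cm], plane 1')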
def set_default_locators_and_formatters(self, axis):
    axis.set_major_locator(HlogMajorLocator())
    axis.set_major_formatter(LogFormatterMathtext(10))
    axis.set_minor_locator(HlogMinorLocator())
    axis.set_minor_formatter(NullFormatter())
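# set_default_locators_and_formatters is the hook a custom Matplotlib scale uses
# to choose its tick machinery, which is where the Hlog locators and the
# NullFormatter above come in. The surrounding boilerplate looks roughly like
# the sketch below, which uses an invented square-root demo scale rather than
# the Hlog scale itself.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import scale as mscale
from matplotlib.ticker import AutoLocator, NullFormatter, ScalarFormatter
from matplotlib.transforms import Transform


class SqrtTransform(Transform):
    input_dims = output_dims = 1

    def transform_non_affine(self, values):
        return np.sqrt(np.maximum(values, 0))

    def inverted(self):
        return SquaredTransform()


class SquaredTransform(Transform):
    input_dims = output_dims = 1

    def transform_non_affine(self, values):
        return np.power(values, 2)

    def inverted(self):
        return SqrtTransform()


class SqrtScale(mscale.ScaleBase):
    """Hypothetical demo scale, standing in for the Hlog scale defined above."""
    name = 'sqrt_demo'

    def get_transform(self):
        return SqrtTransform()

    def set_default_locators_and_formatters(self, axis):
        # the same hook as above: pick locators/formatters for the new scale
        axis.set_major_locator(AutoLocator())
        axis.set_major_formatter(ScalarFormatter())
        axis.set_minor_formatter(NullFormatter())

    def limit_range_for_scale(self, vmin, vmax, minpos):
        return max(vmin, 0), vmax


mscale.register_scale(SqrtScale)

fig, ax = plt.subplots()
x = np.linspace(0, 100, 200)
ax.plot(x, x)
ax.set_yscale('sqrt_demo')
plt.show()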