def show():
    blob_centers = np.array([(-5, -5), (0, 0), (5, 5)])
    cluster_centers = np.array([(-1, 1), (0, 0), (1, -1)])
    X, _ = make_blobs(n_samples=30, centers=blob_centers, shuffle=False, random_state=1234)
    colors = np.array(sns.color_palette(n_colors=3))
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.scatter(X[:, 0], X[:, 1], c='gray', alpha=0.4)
    xmin, xmax = ax.get_xlim()
    ymin, ymax = ax.get_ylim()
    xx, yy = np.meshgrid(np.linspace(xmin, xmax, 100), np.linspace(ymin, ymax, 100))
    mesh = np.c_[xx.ravel(), yy.ravel()]
    ax.scatter(mesh[:, 0], mesh[:, 1],
               c=colors[np.argmin(cdist(mesh, cluster_centers), axis=1)],
               alpha=0.2, marker='.')
    ax.scatter(cluster_centers[:, 0], cluster_centers[:, 1], c='white', marker='x')
    ax.set(xlim=(xmin, xmax), ylim=(ymin, ymax), xticks=(), yticks=())
    plt.show()
def show():
    x, y = make_regression(n_samples=8, n_features=1, bias=50, noise=10, random_state=1234)
    x = StandardScaler().fit_transform(x)
    model = LinearRegression().fit(x, y)
    a = model.coef_
    b = model.intercept_
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.axhline(0, linewidth=1, color='gray')
    ax.axvline(0, linewidth=1, color='gray')
    ax.scatter(x, y, color='black', label='データ')
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    xlim = (min(xlim[0], -3), max(xlim[1], 3))
    ylim = (min(ylim[0], -3 * a + b), max(ylim[1], 3 * a + b))
    ax.plot(xlim, a * xlim + b, label='回帰直線 $y=ax+b$')
    ax.hlines((a + b, b), (1, xlim[0]), (2, 0), color='gray', linestyle='dashed')
    ax.vlines(2, a + b, 2 * a + b, color='gray', linestyle='dashed')
    ax.text(1.5, (a + b) * 0.9, 1, verticalalignment='top')
    ax.text(2 * 1.05, 1.5 * a + b, '$a$', horizontalalignment='left')
    ax.legend()
    ax.set(xlabel='$x$', xlim=xlim, ylim=ylim,
           xticks=(0, ), yticks=(b, ), yticklabels=('$b$', ))
    ax.set_ylabel('$y$', rotation='horizontal')
    plt.show()
def show():
    points = np.array([[1, 1], [2, 2]])
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.axhline(0, color='black', alpha=0.2)
    ax.axvline(0, color='black', alpha=0.2)

    def scatter_with_annotation(ax, text, point):
        ax.scatter(*point, color='gray')
        ax.text(point[0], point[1] + 0.1, text, horizontalalignment='right')

    scatter_with_annotation(ax, 'A', points[0])
    scatter_with_annotation(ax, 'B', points[1])
    ax.annotate(None, points[0], xytext=points[1],
                arrowprops=dict(arrowstyle="-", color="red", alpha=0.4,
                                connectionstyle="angle, angleA = 90, angleB = 0"))
    ax.text(points[1, 0], points[0, 1], 'd', size='large', color='red',
            horizontalalignment='left', verticalalignment='top')
    ax.set(xlim=(-0.5, 3), ylim=(-0.5, 3), xticks=(0, ), yticks=(0, ))
    plt.show()
def show():
    _, (ax1, ax2) = plt.subplots(1, 2, figsize=figaspect(1/3), sharex=True, sharey=True)
    v1 = venn2((2, 2, 1), set_labels=('', ''), set_colors=('lightgray', 'white'), ax=ax1)
    set_label(v1)
    v1.get_patch_by_id('11').set(facecolor='blue')
    ax1.set(title=r'分子$=conf( A\Rightarrow B)$')
    v2 = venn2((2, 2, 1), set_labels=('', ''), set_colors=('lightgray', 'blue'), ax=ax2)
    set_label(v2)
    v2.get_patch_by_id('11').set(facecolor='blue')
    ax2.set(title='分母$=supp( B)$')
    xmin, xmax = ax2.get_xlim()
    ymin, ymax = ax2.get_ylim()
    width = xmax - xmin
    height = ymax - ymin
    margin = 0.01
    padding = 0.1
    ax2.text(xmin + width*padding, ymax - height*padding, '$N$')
    ax2.add_patch(Rectangle((xmin + width*margin, ymin + height*margin),
                            width*(1 - margin*2), height*(1 - margin*2),
                            facecolor='lightgray', zorder=0))
    plt.show()
def simulation_analysis_plot(system: systemCls, title: str = "", out_path: str = None,
                             limits_coordinate_space: Tuple[float, float] = None,
                             oneD_limits_potential_system_energy: Tuple[float, float] = None,
                             limits_force: Tuple[float, float] = None,
                             twoD_number_of_bins: int = 25,
                             resolution_full_space=style.potential_resolution,
                             figsize: Tuple[float, float] = figaspect(0.25)) -> Tuple[plt.figure, str]:
    """
    This is a wrapper function for the analysis of a simulated system: depending on the
    dimensionality of the system (and whether it carries a bias potential), it dispatches
    to the matching 1D or 2D analysis-plot routine.

    Parameters
    ----------
    system
        the simulated system to analyse
    title
        title of the resulting figure
    out_path
        optional path the figure is written to
    limits_coordinate_space
        axis limits of the coordinate space
    resolution_full_space
        resolution used to render the full potential space

    Returns
    -------
    Tuple[plt.figure, str]
        the generated figure and the output path
    """
    if (system.nDimensions == 1):
        if (hasattr(system, "bias_potential") and system.bias_potential):
            fig, out_path = oneD_biased_simulation_analysis_plot(
                system=system, title=title, out_path=out_path,
                limits_coordinate_space=limits_coordinate_space,
                limits_potential_system_energy=oneD_limits_potential_system_energy,
                limits_force=limits_force,
                resolution_full_space=resolution_full_space,
                figsize=figsize)
        else:
            fig, out_path = oneD_simulation_analysis_plot(
                system=system, title=title, out_path=out_path,
                limits_coordinate_space=limits_coordinate_space,
                limits_potential_system_energy=oneD_limits_potential_system_energy,
                limits_force=limits_force,
                resolution_full_space=resolution_full_space,
                figsize=figsize)
    elif (system.nDimensions == 2):
        fig, out_path = twoD_simulation_analysis_plot(
            system=system, title=title, out_path=out_path,
            limits_coordinate_space=limits_coordinate_space,
            number_of_bins=twoD_number_of_bins,
            resolution_full_space=resolution_full_space,
            figsize=figsize)

    return fig, out_path
def mergepmf(pmf1, pmf2, outputname, plot=True):
    # merge PMFs
    cv_czar, pmf_czar = np.genfromtxt(pmf1, unpack=True)
    cv_amd, pmf_amd = np.genfromtxt(pmf2, unpack=True)
    # cv_count, abf_count = np.genfromtxt('deca.count', unpack=True)
    # abf_count_correction = calcpmf(abf_count)
    # pmf_amd += abf_count_correction
    f = interp1d(cv_amd, pmf_amd, fill_value='extrapolate', kind='quadratic')
    pmf_amd = f(cv_czar)
    pmf_total = pmf_amd + pmf_czar
    pmf_total_min = np.min(pmf_total)
    pmf_total = pmf_total - pmf_total_min
    output_pmf = outputname + '.pmf'
    headerLines = readHeaderString(pmf1)
    with open(output_pmf, 'w') as fp_out:
        for line in headerLines:
            fp_out.write(line)
        for cv, pmf in np.nditer([cv_czar, pmf_total]):
            fp_out.write(f'{cv:10.4f} {pmf:12.7f}\n')
    # np.savetxt(output_pmf, np.c_[cv_czar, pmf_total], fmt='%10.4f %12.7f')
    if plot is True:
        # reference PMF
        ref_cv, ref_pmf = np.genfromtxt('../ref.dat', unpack=True)
        # WTM-eABF PMF
        wtm_cv, wtm_pmf = np.genfromtxt('wtm-eabf.pmf', unpack=True)
        # plotting
        w, h = figaspect(1 / 1.2)
        plt.figure(figsize=(w, h))
        plt.plot(ref_cv, ref_pmf, color='black', label='Reference')
        plt.plot(wtm_cv, wtm_pmf, color='tab:orange', label='WTM-eABF')
        plt.plot(cv_czar, pmf_total, color='tab:red', label='eABF + GaMDD')
        plt.plot(cv_czar, pmf_czar, color='tab:blue', label='eABF part')
        plt.plot(cv_czar, pmf_amd, color='tab:green', label='GaMDD part')
        plt.xlabel('Distance (nm)')
        plt.ylabel(r'$\Delta G$ (kcal/mol)')
        ax = plt.gca()
        ax.tick_params(direction='in', which='major', length=6.0, width=1.0, top=True, right=True)
        ax.tick_params(direction='in', which='minor', length=3.0, width=1.0, top=True, right=True)
        ax.xaxis.get_major_formatter()._usetex = False
        ax.yaxis.get_major_formatter()._usetex = False
        ax.xaxis.set_minor_locator(AutoMinorLocator())
        ax.yaxis.set_minor_locator(AutoMinorLocator())
        ax.set_ylim(0, 30)
        plt.legend(prop={'size': 14}, fancybox=False, frameon=False)
        plt.tight_layout(pad=0.2)
        output_png = outputname + '.png'
        plt.savefig(output_png, dpi=600, transparent=False)
        plt.close()
def hist(scale):
    x = np.random.normal(size=1000000, scale=scale)
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.hist(x, range=(-6, 6), bins=200, density=True)
    ax.set(xlim=(-6, 6), ylim=(0, 1))
    plt.show()
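# --- Added usage sketch (not part of the original snippet) ---
# hist() above leaves its imports implicit; a minimal setup and two example calls
# could look like this.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.figure import figaspect

hist(0.5)   # narrow, tall bell curve
hist(2.0)   # wider spread, lower peak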
def __init__(self, error_a, error_v, angle, angle_no_small):
    plt.close()
    w, h = figaspect(1 / 2.9)
    fig, axes = plt.subplots(1, 3, figsize=(w, h))
    self.fontsize_title = 20
    self.fontsize_label = 15
    plt.tight_layout(pad=3.1, w_pad=3)
    self.nbins = 32
    self.n1 = self.plot_error_annotation(axes[0], data=error_a, title='Arousal')
    self.n2 = self.plot_error_annotation(axes[1], data=error_v, title='Valence')
    self.plot_error_angle(axes[2], data=angle, title='Emotion Angle')
    # self.plot_error_angle(axes[3], data=angle_no_small, title='Emotion Angle No Small')
    # axes[0].axvline(x=-0.225, color='red', linestyle='dashed')
    # axes[0].axvline(x=0.225, color='red', linestyle='dashed')
    # axes[1].axvline(x=-0.230, color='red', linestyle='dashed')
    # axes[1].axvline(x=0.230, color='red', linestyle='dashed')
    for ax in axes:
        ax.set_ylim(axes[2].get_ylim())
        ax.tick_params(axis='both', which='major', labelsize=15)
    plt.savefig('../error_stats_angle.pdf', format='pdf')
    plt.show()
def plot_dSm(data, image_index, outputname):
    w, h = figaspect(1 / 2)
    plt.figure(figsize=(w, h))
    x = np.arange(0, len(data), 1)
    plt.plot(x, data)
    plt.title(f'm = {image_index+1}')
    plt.xlabel(r'Iterations')
    plt.ylabel(r'$\sqrt{S^m(\delta\tau)}/\delta\tau$ (°)')
    ax = plt.gca()
    ax.set_ylim(0, 1.5)
    ax.tick_params(direction='in', which='major', length=6.0, width=1.0, top=True, right=True)
    ax.tick_params(direction='in', which='minor', length=3.0, width=1.0, top=True, right=True)
    ax.xaxis.set_minor_locator(AutoMinorLocator())
    ax.yaxis.set_minor_locator(AutoMinorLocator())
    ax.xaxis.get_major_formatter()._usetex = False
    ax.yaxis.get_major_formatter()._usetex = False
    plt.savefig(outputname, dpi=300, bbox_inches='tight', transparent=False)
    plt.close()
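# --- Added usage sketch (not part of the original snippet) ---
# plot_dSm() assumes the imports below; the convergence trace and output file name
# here are made up purely for illustration.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.figure import figaspect
from matplotlib.ticker import AutoMinorLocator

demo_trace = np.abs(np.random.normal(loc=0.5, scale=0.1, size=200))  # hypothetical data
plot_dSm(demo_trace, image_index=0, outputname='dSm_demo.png')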
def show():
    x_min, x_max = -3, 3
    coef = 2
    sigma = 1
    x = np.arange(x_min + 1, x_max)
    np.random.seed(1234)
    e = np.random.normal(scale=sigma, size=x.size)
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.plot((x_min, x_max), (x_min * coef, x_max * coef), c='k', label='$f(X_{i})$')
    ax.scatter(x, x * coef + e, c='k', label=r'$(X_{i},\ y_{i})$')
    for i, y_hat in enumerate(x * coef):
        x_tmp = x_min + i + 1
        ee = np.linspace(-sigma * 3, sigma * 3, 10)
        xx = x_tmp + norm.pdf(ee)
        yy = y_hat + ee
        lines = ax.plot(xx, yy, c='b')
        line_collection = ax.hlines(y_hat + e[i], x_tmp, x_tmp + norm.pdf(e[i]), color='r')
        if i == 0:
            lines[0].set(label=r'$\epsilon _{i}$')
            line_collection.set(label=r'$P(y_{i}\ |\ X_{i})$')
    ax.legend()
    ax.set(xlim=(x_min, x_max), xticks=(), yticks=())
    plt.show()
def gen_steam_stats_graph(self, data):
    graph_data = data['graph']
    steps = timedelta(milliseconds=graph_data['step'])
    timestamp = datetime.utcfromtimestamp(graph_data['start'] / 1000)
    plots = graph_data['data']
    times = []
    for _ in plots:
        timestamp -= steps
        times.append(timestamp)

    plt.style.use('dark_background')
    w, h = figaspect(1 / 3)
    fig, ax = plt.subplots(figsize=(w, h))
    ax.grid(linestyle='-', linewidth='0.5', color='white')
    plt.setp(plt.plot(list(reversed(times)), plots, linewidth=4), color='#00adee')
    plt.title(f'Steam CM status over the last {human_timedelta(timestamp)[:-4]}', size=20)
    plt.axis([None, None, 0, 100])
    plt.xlabel('Time (Month-Day Hour)', fontsize=20)
    plt.ylabel('Uptime (%)', fontsize=20)
    plt.tight_layout(h_pad=20, w_pad=20)

    buf = BytesIO()
    plt.savefig(buf, format='png', transparent=True)
    buf.seek(0)
    plt.close()
    return discord.File(buf, filename='graph.png')
def show(x, y):
    model = LinearSVC(C=1e30, random_state=1234)
    model.fit(x, y)
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.scatter(x[:, 0], x[:, 1], c=y)
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    # `resolution` is expected to be defined at module level
    xx, yy = np.meshgrid(np.linspace(xlim[0], xlim[1], resolution),
                         np.linspace(ylim[0], ylim[1], resolution))
    grid = np.c_[xx.ravel(), yy.ravel()]
    zz = model.decision_function(grid).reshape(xx.shape)
    cs = ax.contour(xx, yy, zz, colors='k', levels=[-1, 0, 1],
                    linewidths=0.8, linestyles=['--', '-', '--'])
    fmt = {c: '決定境界' if i == 1 else 'サポートベクター' for i, c in enumerate(cs.levels)}
    ax.clabel(cs, fontsize='large', fmt=fmt)
    ax.set(xlim=xlim, ylim=ylim, xticks=(), yticks=())
    plt.show()
def plotNumpy3D(self, arr, color=None, time=None, size=None):
    plt.clf()
    w, h = figaspect(1)
    w *= 1.6
    h = w
    fig = plt.figure(figsize=(w, h))
    ax = fig.add_subplot(111, projection='3d')
    ax.view_init(azim=100, elev=60)
    # ax = fig.add_subplot(111)  # not 3D
    ax.grid(linestyle='--')
    ax.set_axisbelow(True)
    ax.tick_params(axis='both', which='both', left=False, top=False, right=False, bottom=False)
    x = arr[:, 0]
    y = arr[:, 1]
    z = arr[:, 2]
    ax.scatter(x, y, z, c=('C0' if color is None else color), s=(10 if size is None else size))
    ax.scatter(self.gnList[:, 0], self.gnList[:, 1], self.gnList[:, 2], c='green', s=50, alpha=0.3)
    # ax.scatter(x, y, c=('C0' if color is None else color), s=100)  # not 3D
    # ax.scatter(self.gnList[:, 0], self.gnList[:, 1], c='green', s=50, alpha=0.2)  # not 3D
    for i in arr:
        realRadiusRange = ((((self.longRange)**2) + (z[0]**2))**0.5)
        # p = Circle((i[0], i[1]), self.longRange, color=color, alpha=0.3, lw=0.8)
        p = Circle((i[0], i[1]), realRadiusRange, color=color, alpha=0.3, lw=0.8)
        ax.add_patch(p)
        art3d.pathpatch_2d_to_3d(p, z=1.5, zdir="z")
    # ax.set_xlim(-10, 120)
    # ax.set_ylim(-10, 120)
    if time is not None:
        if time > 0:
            plt.pause(time)
            plt.clf()
    else:
        plt.show()
def show():
    bins = [10, 30]
    tip = sns.load_dataset('tips')['tip']
    _, axes = plt.subplots(1, 2, figsize=figaspect(1 / 2))
    for ax, b in zip(axes, bins):
        ax.hist(tip, bins=b)
        ax.set(title=f'bins={b}', xticks=(), yticks=())
    plt.show()
def plot(df):
    chi2 = stats.chi2.pdf(x, df=df)
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.plot(x, chi2, label=r'自由度 ${df}$ の $\chi^2$ 分布'.format(df=df))
    ax.legend()
    ax.set(title=r'$\chi^2$ 分布', xlim=xlim, ylim=(0, 0.3))
    plt.show()
def _process(self, img, savepath=None, **kwargs):
    if savepath is not None:
        H, W, _ = self.base.shape
        w, h = figaspect(H / W)
        w, h = self.img_scale * w, self.img_scale * h
        plt.savefig(savepath, dpi=W / w)
    if not self._view:
        plt.close(plt.gcf())
def refreshGraph(graph, node_color, fig):
    print("here")
    # plt.clf()
    pos = nx.get_node_attributes(graph, 'pos')
    w, h = figaspect(5 / 3)
    fig, ax = plt.subplots(figsize=(w, h))
    nx.draw(graph, pos, node_color=node_color, node_size=20, ax=ax)
    fig.canvas.draw()
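# --- Added usage sketch (not part of the original snippet) ---
# refreshGraph() reads node positions from a 'pos' node attribute; the toy graph below
# is invented for illustration. The fig parameter is immediately shadowed by the new
# figure created inside the function, so any placeholder can be passed.
import networkx as nx
import matplotlib.pyplot as plt
from matplotlib.figure import figaspect

G = nx.Graph()
G.add_node(0, pos=(0, 0))
G.add_node(1, pos=(1, 2))
G.add_edge(0, 1)
refreshGraph(G, node_color=['red', 'blue'], fig=None)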
def plot_1d(data, outputname, xlabel, ylabel):
    plt.rcParams.update({
        "pgf.texsystem": "lualatex",
        "font.family": "serif",   # use serif/main font for text elements
        "text.usetex": True,      # use inline math for ticks
        "pgf.rcfonts": False,     # don't setup fonts from rc parameters
        "axes.labelsize": 28,
        "axes.linewidth": 2.0,
        'axes.unicode_minus': False,
        "font.size": 24,
        "pgf.preamble": '\n'.join([
            "\\usepackage{units}",
            "\\usepackage{metalogo}",
            "\\usepackage{unicode-math}",
            r"\setmathfont{MathJax_Math}",
            r"\setmainfont{FreeSans}",
        ])
    })
    w, h = figaspect(1 / 1.1)
    plt.figure(figsize=(w, h))
    plt.plot(data[0], data[1])
    # plt.title(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    ax = plt.gca()
    ax.set_xlim(-180.0, 180.0)
    ax.set_ylim(0, 16.0)
    ax.xaxis.get_major_formatter()._usetex = False
    ax.yaxis.get_major_formatter()._usetex = False
    ax.tick_params(direction='in', which='major', length=6.0, width=1.0, top=True, right=True, pad=8.0)
    ax.tick_params(direction='in', which='minor', length=3.0, width=1.0, top=True, right=True, pad=8.0)
    ax.xaxis.set_major_locator(plt.MultipleLocator(90))
    ax.yaxis.set_major_locator(AutoLocator())
    ax.xaxis.set_minor_locator(AutoMinorLocator())
    ax.yaxis.set_minor_locator(AutoMinorLocator())
    plt.savefig(outputname, dpi=300, bbox_inches='tight', transparent=False)
    plt.close()
def plot(i):
    _, (ax1, ax2) = plt.subplots(1, 2, figsize=figaspect(1 / 2), sharex=True)
    plot_cdf(ax1, i, norm.cdf(i))
    ax1.set(xticklabels=['in'], yticklabels=[0, 'out', 1])
    plot_pdf_area(ax2, np.linspace(x_min, i, resolution))
    plt.show()
def plotEtaZeta(t_list, eta_list, zeta_list):
    # plot AC rate
    w, h = figure.figaspect(6.)
    fig, axes = plt.subplots(figsize=(h, w))
    axes.set_xlabel('hours')
    axes.plot(t_list, zeta_list, color='steelblue', lw=0.7)
    axes.set_ylabel('AC rate [mas/s]')
    plt.savefig('zetaVStime.png')
    plt.close()
    # plot AL rate
    w, h = figure.figaspect(6.)
    fig, axes = plt.subplots(figsize=(h, w))
    axes.set_xlabel('hours')
    axes.plot(t_list, eta_list, color='steelblue', lw=0.7)
    axes.set_ylabel('AL rate [mas/s]')
    plt.savefig('etaVStime.png')
    plt.close()
def plot(count):
    distributions = stats.norm(loc=sample[:count])
    fig = plt.figure(figsize=figaspect(1))
    ax = fig.gca()
    ax.plot(x, distributions.pdf(x))
    ax.plot(x, distributions.pdf(x).sum(axis=1), color='black', alpha=0.3)
    sns.rugplot(sample, color='black', ax=ax)
    ax.set(xlabel='x', xticks=(), xlim=(x.min(), x.max()), ylim=(0, 1))
    plt.show()
def plot(df):
    t = stats.t.pdf(x, df=df)
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.plot(x, n, label='標準正規分布')
    ax.plot(x, t, label=f'自由度 ${df}$ の $t$ 分布')
    ax.legend()
    ax.set(title='標準正規分布と $t$ 分布', xlim=xlim, ylim=ylim)
    plt.show()
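# --- Added setup sketch (not part of the original snippet) ---
# plot() above reads the module-level x, n, xlim and ylim; something along these lines
# is assumed to precede it (the exact ranges here are guesses).
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from matplotlib.figure import figaspect

xlim = (-4, 4)
ylim = (0, 0.45)
x = np.linspace(*xlim, 200)
n = stats.norm.pdf(x)

plot(df=3)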
def plot(i):
    _, (ax1, ax2) = plt.subplots(1, 2, figsize=figaspect(1 / 2), sharex=True)
    plot_cdf(ax1, i, 1 - norm.sf(i), y_origin=1)
    ax1.set(xticklabels=['in'], yticklabels=[0, '', 1])
    plot_pdf_area(ax2, np.linspace(i, x_max, resolution))
    plt.show()
def plot(total, n_objects, n_drawn):
    x = np.arange(0, n_drawn + 1)
    distribution = stats.hypergeom(total, n_objects, n_drawn)
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.vlines(x, 0, distribution.pmf(x))
    ax.set(title='超幾何分布', xlabel='対象の個数', ylim=(0, 1), xticks=x)
    ax.set_ylabel('確率', rotation=0, horizontalalignment='right')
    plt.show()
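# --- Added usage sketch (not part of the original snippet) ---
# Example call: PMF of the number of marked objects when drawing 5 items without
# replacement from a population of 20 containing 7 marked ones. Assumes the imports
# below and a font that can render the Japanese labels.
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from matplotlib.figure import figaspect

plot(total=20, n_objects=7, n_drawn=5)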
def make_yyplot(ActualY, EstimatedY, YMax, YMin, EstimatedYName):
    plt.figure(figsize=figure.figaspect(1))
    plt.scatter(ActualY, EstimatedY)
    plt.plot([YMin - 0.05 * (YMax - YMin), YMax + 0.05 * (YMax - YMin)],
             [YMin - 0.05 * (YMax - YMin), YMax + 0.05 * (YMax - YMin)], 'k-')
    plt.ylim(YMin - 0.05 * (YMax - YMin), YMax + 0.05 * (YMax - YMin))
    plt.xlim(YMin - 0.05 * (YMax - YMin), YMax + 0.05 * (YMax - YMin))
    plt.xlabel("Actual Y")
    plt.ylabel(EstimatedYName)
    plt.show()
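# --- Added usage sketch (not part of the original snippet) ---
# make_yyplot() assumes numpy, pyplot and matplotlib's figure module are imported as
# below; the observed/predicted values are synthetic.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import figure

y_actual = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
y_estimated = y_actual + np.random.normal(scale=0.2, size=y_actual.size)
make_yyplot(y_actual, y_estimated, y_actual.max(), y_actual.min(), 'Estimated Y (demo)')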
def plot(int_range):
    x_min, x_max = int_range
    x = np.arange(x_min, x_max + 1)
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.vlines(x, 0, 1.0 / x.size)
    ax.set(title='離散一様分布', xlabel='取りうる値', xlim=xlim, ylim=(0, 1.1), xticks=x)
    ax.set_ylabel('確率', rotation=0, horizontalalignment='right')
    plt.show()
def plot(p):
    x = [0, 1]
    y = stats.bernoulli.pmf(x, p)
    plt.figure(figsize=figaspect(1))
    ax = plt.axes()
    ax.vlines(x, 0, y)
    ax.set(title='ベルヌーイ分布', xlabel='取りうる値', ylim=(0, 1.1), xticks=x)
    ax.set_ylabel('確率', rotation=0, horizontalalignment='right')
    plt.show()
def figure_setup(num_subplots, aspect, scale=1):
    # plt.style.use('pluto-paper')
    fig, axs = plt.subplots(nrows=num_subplots, sharex=True,
                            gridspec_kw={'hspace': 0},
                            figsize=[scale * dim for dim in figaspect(num_subplots * aspect)])
    if num_subplots == 1:
        axs = [axs]
    return fig, axs
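# --- Added usage sketch (not part of the original snippet) ---
# figure_setup() stacks num_subplots shared-x panels with zero vertical spacing; the
# sine/cosine demo data below are made up.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.figure import figaspect

fig, axs = figure_setup(num_subplots=2, aspect=1 / 3, scale=1.5)
t = np.linspace(0, 2 * np.pi, 200)
axs[0].plot(t, np.sin(t))
axs[1].plot(t, np.cos(t))
plt.show()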
def convert_data_to_graph(self, visualize=True):
    # Create network graph
    G = nx.Graph()
    list_of_nodes = []
    current_id = 0
    for entity in self.line_data:
        connectivity_list = []
        for point in entity:
            point = tuple([int(_) for _ in point])
            # If we are beginning then list is empty (i.e. no nodes yet)
            if not list_of_nodes:
                list_of_nodes.append(Node(current_id, point))
                connectivity_list.append(current_id)
                current_id += 1
            else:
                bFoundSameNode = False
                for node in list_of_nodes:
                    # Check if the point in question is the same as a node
                    if point == node.coordinate:
                        connectivity_list.append(node.id)
                        bFoundSameNode = True
                        break
                if not bFoundSameNode:
                    list_of_nodes.append(Node(current_id, point))
                    connectivity_list.append(current_id)
                    current_id += 1
        for con_idx, connection in enumerate(connectivity_list[:-1]):
            for node in list_of_nodes:
                if node.id == connection:
                    node.connected_to.append(connectivity_list[con_idx + 1])

    for node in list_of_nodes:
        G.add_node(node.id, pos=node.coordinate)
    for node in list_of_nodes:
        if not node.connected_to:
            pass
        else:
            for node_connection in node.connected_to:
                '''
                p0 = np.asarray(list_of_nodes[node.id].coordinate)
                p1 = np.asarray(list_of_nodes[node_connection].coordinate)
                weight = np.linalg.norm(p0 - p1)
                G.add_edge(node.id, node_connection, weight=weight)
                '''
                G.add_edge(node.id, node_connection)

    if visualize:
        pos = nx.get_node_attributes(G, 'pos')
        w, h = figaspect(5 / 3)
        fig, ax = plt.subplots(figsize=(w, h))
        nx.draw(G, pos, node_size=20, ax=ax)
        plt.show()

    return G
def costmap_plot(arr):
    # using costmap_plot() to plot the background first (with resolution of
    # (map_size/map_res)^2), then in update_plot the color (zz, as probability) is updated.
    w, h = figaspect(.8)
    fig = plt.figure(figsize=(w, h))
    ani = animation.FuncAnimation(fig, update_plot, interval=1)
    plt.show()
def __init__(self, grid, animation_step, step_interval, arrow_for_potential):
    self.grid = grid
    self.animation_step = animation_step
    self.arrow_potential = arrow_for_potential

    # Setup subplots
    aspect = figaspect(1)
    self.figure, self.axis = plt.subplots(figsize=1.5 * aspect)
    self.arrows = self.place_arrows()
    self.animated_matrix = self.axis.matshow(self.grid.mat,
                                             vmin=Cell.RESERVED, vmax=Direction.RIGHT,
                                             cmap=cm.Paired)
    self.animation = animation.FuncAnimation(self.figure, self.update_view,
                                             self.animation_step,
                                             init_func=self.update_view,
                                             interval=step_interval,
                                             save_count=0, blit=True)
def view(structure, outdoor_lux=109870., indoor_lux=500., show_original=False):
    '''
    Experimental. The lux ratio is buggy.
    Default outdoor lux is for AM1.5 and indoor lux for office lighting.
    See https://en.wikipedia.org/wiki/Daylight and https://en.wikipedia.org/wiki/Lux
    '''
    if not structure._color_calculated:
        structure.calculate_color()
    T_image = mpimg.imread(sup_path + "test_outdoor.png")
    R_image = mpimg.imread(sup_path + "test_indoor.png")
    T_filter = np.array(structure.T_color, float)
    R_filter = np.array(structure.R_color, float)
    T_image_after = T_image * T_filter
    R_image_after = R_image * R_filter
    if outdoor_lux > indoor_lux:
        R_image_after *= indoor_lux / outdoor_lux
    else:
        T_image_after *= outdoor_lux / indoor_lux
    overlay = T_image_after + R_image_after

    f, ax = plt.subplots(nrows=1, ncols=1)
    f.suptitle(structure.label)
    ax.set_title("Outdoor lux: {:d} Indoor lux: {:d}".format(int(outdoor_lux), int(indoor_lux)))
    ax.imshow(overlay)
    ax.axis("off")

    if show_original:
        w, h = figaspect(2.)
        f2, ((ax1), (ax2)) = plt.subplots(nrows=2, ncols=1, sharex='col', figsize=(w, h))
        f2.subplots_adjust(left=0.03, right=0.97, hspace=0.0, wspace=0.0)
        ax1.axis("off")
        ax2.axis("off")
        ax1.set_aspect("equal")
        ax2.set_aspect("equal")
        ax1.imshow(T_image)
        ax2.imshow(R_image)
Y_pred_ceiling = sp.maximum(Y_pred_ceiling, 1)
kappa_cdf_cv[run, fold] = quadratic_weighted_kappa(Y_valid, Y_pred_cdf)
kappa_rounding_cv[run, fold] = quadratic_weighted_kappa(Y_valid, Y_pred_rounding)
kappa_ceiling_cv[run, fold] = quadratic_weighted_kappa(Y_valid, Y_pred_ceiling)

print("Kappa using cdf decoding method: %.6f (%.6f)" % (np.mean(kappa_cdf_cv), np.std(kappa_cdf_cv)))
print("Kappa using rounding decoding method: %.6f (%.6f)" % (np.mean(kappa_rounding_cv), np.std(kappa_rounding_cv)))
print("Kappa using ceiling decoding method: %.6f (%.6f)" % (np.mean(kappa_ceiling_cv), np.std(kappa_ceiling_cv)))

###################
## Visualization ##
###################
w, h = figaspect(1)
plt.figure(figsize=(w, h))
f, axarr = plt.subplots(4, 4, sharex=True)

## raw prediction
axarr[0, 0].hist(Y_pred_raw[Y_valid == 1])
axarr[0, 0].set_title("Relevance = 1")
axarr[0, 0].set_ylabel("Raw")
axarr[0, 1].hist(Y_pred_raw[Y_valid == 2])
axarr[0, 1].set_title("Relevance = 2")
axarr[0, 2].hist(Y_pred_raw[Y_valid == 3])
axarr[0, 2].set_title("Relevance = 3")
axarr[0, 3].hist(Y_pred_raw[Y_valid == 4])
axarr[0, 3].set_title("Relevance = 4")

## rounding decoding
axarr[1, 0].hist(Y_pred_rounding[Y_valid == 1])
axarr[1, 0].set_ylabel("Rounding")
def main():
    # collect argvs
    log_file = sys.argv[1]
    if len(sys.argv) > 2:
        pdf_file = sys.argv[2]
    else:
        pdf_file = sys.argv[1][:-4] + ".pdf"

    # fetch training and validation iteration
    cmd = (
        "cat %s | grep 'solver.cpp:231] Iteration ' | awk '{print $6}' | awk -F',' '{print $1}' > train_iteration.tmp"
        % log_file
    )
    subprocess.call(cmd, shell=True)
    cmd = (
        "cat %s | grep 'solver.cpp:287] Iteration ' | awk '{print $6}' | awk -F',' '{print $1}' > valid_iteration.tmp"
        % log_file
    )
    subprocess.call(cmd, shell=True)

    # fetch training and validation logloss
    cmd = "cat %s | grep 'Train net output #3: loss_fine = ' | awk '{print $11}' > train_logloss.tmp" % log_file
    subprocess.call(cmd, shell=True)
    cmd = "cat %s | grep 'Test net output #3: loss_fine =' | awk '{print $11}' > valid_logloss.tmp" % log_file
    subprocess.call(cmd, shell=True)

    # fetch training and validation accuracy
    cmd = "cat %s | grep 'Train net output #1: accuracy_fine = ' | awk '{print $11}' > train_accuracy.tmp" % log_file
    subprocess.call(cmd, shell=True)
    cmd = "cat %s | grep 'Test net output #1: accuracy_fine =' | awk '{print $11}' > valid_accuracy.tmp" % log_file
    subprocess.call(cmd, shell=True)

    train_logloss = np.loadtxt("train_logloss.tmp")
    valid_logloss = np.loadtxt("valid_logloss.tmp")
    train_accuracy = 100 * np.loadtxt("train_accuracy.tmp")
    valid_accuracy = 100 * np.loadtxt("valid_accuracy.tmp")
    train_iteration = np.loadtxt("train_iteration.tmp", dtype=int)
    valid_iteration = np.loadtxt("valid_iteration.tmp", dtype=int)

    valid_logloss_min = np.min(valid_logloss)
    valid_accuracy_min = np.min(valid_accuracy)
    best_ind = np.where(valid_logloss == valid_logloss_min)[0][0]
    best_iter = valid_iteration[best_ind]

    # plot training and validation logloss
    w, h = figaspect(0.5)
    plt.figure(figsize=(w, h))
    f, axarr = plt.subplots(2, sharex=True)
    # logloss
    axarr[0].plot(train_iteration, train_logloss)
    axarr[0].plot(valid_iteration, valid_logloss)
    axarr[0].plot(train_iteration, valid_logloss_min * np.ones((len(train_iteration))))
    axarr[0].set_title("LogLoss vs Iteration")
    axarr[0].set_ylabel("LogLoss")
    axarr[0].legend(
        ["Train", "Valid (Min = %s at Iter. = %s)" % (np.round(valid_logloss_min, 5), best_iter)],
        loc="upper right"
    )
    # accuracy
    axarr[1].plot(train_iteration, train_accuracy)
    axarr[1].plot(valid_iteration, valid_accuracy)
    axarr[1].plot(train_iteration, valid_accuracy[best_ind] * np.ones((len(train_iteration))))
    axarr[1].set_title("Accuracy vs Iteration")
    axarr[1].set_xlabel("Iteration")
    axarr[1].set_ylabel("Accuracy [%]")
    axarr[1].legend(
        ["Train", "Valid (Acc = %s%% at Iter. = %s)" % (np.round(valid_accuracy[best_ind], 2), best_iter)],
        loc="lower right",
    )
    # fmt = '%.0f%%'
    # yticks = mtick.FormatStrFormatter(fmt)
    # axarr[1].yaxis.set_major_formatter(yticks)
    # plt.tight_layout()
    plt.savefig(pdf_file)
    print("Save pdf figure to %s" % pdf_file)

    # cleanup
    cmd = "rm -rf train_iteration.tmp"
    subprocess.call(cmd, shell=True)
    cmd = "rm -rf valid_iteration.tmp"
    subprocess.call(cmd, shell=True)
    cmd = "rm -rf train_logloss.tmp"
    subprocess.call(cmd, shell=True)
    cmd = "rm -rf valid_logloss.tmp"
    subprocess.call(cmd, shell=True)
    cmd = "rm -rf train_accuracy.tmp"
    subprocess.call(cmd, shell=True)
    cmd = "rm -rf valid_accuracy.tmp"
    subprocess.call(cmd, shell=True)
def runtest(lmaManager=None, lma_view=None, HDFmanagers=None):
    # colormap = get_cmap('gist_yarg_r')
    colormap = get_cmap('gist_earth')
    density_maxes = []
    total_counts = []
    all_t = []

    for delta_minutes in minute_intervals:
        time_delta = DateTimeDelta(0, 0, delta_minutes, 0)
        n_frames = int(ceil((end_time - start_time) / time_delta))
        n_cols = 6
        n_rows = int(ceil(float(n_frames) / n_cols))
        w, h = figaspect(float(n_rows) / n_cols)

        xedge = np.arange(b.x[0], b.x[1] + dx, dx)
        yedge = np.arange(b.y[0], b.y[1] + dy, dy)
        x_range = b.x[1] - b.x[0]
        y_range = b.y[1] - b.y[0]

        min_count, max_count = 1, max_count_baseline * delta_minutes

        f = figure(figsize=(w, h))
        p = small_multiples_plot(fig=f, rows=n_rows, columns=n_cols)
        p.label_edges(True)
        for ax in p.multiples.flat:
            ax.yaxis.set_major_formatter(kilo_formatter)
            ax.xaxis.set_major_formatter(kilo_formatter)

        for i in range(n_frames):
            frame_start = start_time + i * time_delta
            frame_end = frame_start + time_delta
            b.sec_of_day = (frame_start.abstime, frame_end.abstime)
            b.t = (frame_start, frame_end)

            do_plot = False
            flash_extent_density = True
            density = None

            if source_density == True:
                lmaManager.refresh(b)
                lma_view.transformed.cache_is_old()
                x, y, t = lma_view.transformed['x', 'y', 't']
                density, edges = np.histogramdd((x, y), bins=(xedge, yedge))
                do_plot = True
            else:
                for lmaManager in HDFmanagers:
                    # yes, loop through every file every time and reselect data.
                    # so wrong, yet so convenient.
                    h5 = lmaManager.h5file
                    if flash_extent_density == False:
                        lmaManager.refresh(b)
                        lma_view = AcuityView(DataSelection(lmaManager.data, b), mapProj, bounds=b)
                        # lma_view.transformed.cache_is_old()
                        x, y, t = lma_view.transformed['x', 'y', 't']
                        if x.shape[0] > 1:
                            do_plot = True
                            break
                    else:
                        # assume here that the bounds sec_of_day day is the same as
                        # the dataset day
                        t0, t1 = b.sec_of_day
                        # events = getattr(h5.root.events, lmaManager.table.name)[:]
                        # flashes = getattr(h5.root.flashes, lmaManager.table.name)[:]
                        event_dtype = getattr(h5.root.events, lmaManager.table.name)[0].dtype
                        events_all = getattr(h5.root.events, lmaManager.table.name)[:]
                        flashes = getattr(h5.root.flashes, lmaManager.table.name)

                        def event_yielder(evs, fls):
                            these_events = []
                            for fl in fls:
                                if ((fl['n_points'] > 9) & (t0 < fl['start']) & (fl['start'] <= t1)):
                                    these_events = evs[evs['flash_id'] == fl['flash_id']]
                                    if len(these_events) != fl['n_points']:
                                        print('not giving all ', fl['n_points'], ' events? ', these_events.shape)
                                    for an_ev in these_events:
                                        yield an_ev

                        # events = np.fromiter((an_ev for an_ev in (
                        #     events_all[events_all['flash_id'] == fl['flash_id']]
                        #     for fl in flashes if (
                        #         (fl['n_points'] > 9) & (t0 < fl['start']) & (fl['start'] <= t1)
                        #     )
                        # )), dtype=event_dtype)
                        events = np.fromiter(event_yielder(events_all, flashes), dtype=event_dtype)
                        # print events['flash_id'].shape

                        ### Flash extent density ###
                        x, y, z = mapProj.fromECEF(*geoProj.toECEF(events['lon'], events['lat'], events['alt']))

                        # Convert to integer grid coordinate bins
                        #      0    1    2    3
                        #   |    |    |    |    |
                        # -1.5  0.0  1.5  3.0  4.5
                        if x.shape[0] > 1:
                            density, edges = extent_density(x, y, events['flash_id'].astype('int32'),
                                                            b.x[0], b.y[0], dx, dy, xedge, yedge)
                            do_plot = True
                            break
            # print 'density values: ', density.min(), density.max()

            if do_plot == True:  # need some data
                # density, edges = np.histogramdd((x, y), bins=(xedge, yedge))
                density_plot = p.multiples.flat[i].pcolormesh(xedge, yedge,
                                                              np.log10(density.transpose()),
                                                              vmin=-0.2, vmax=np.log10(max_count),
                                                              cmap=colormap)
                label_string = frame_start.strftime('%H%M:%S')
                text_label = p.multiples.flat[i].text(b.x[0] - pad + x_range * .01,
                                                      b.y[0] - pad + y_range * .01,
                                                      label_string, color=(0.5,) * 3, size=6)
                density_plot.set_rasterized(True)
                density_maxes.append(density.max())
                total_counts.append(density.sum())
                all_t.append(frame_start)
                print(label_string, x.shape, density.max(), density.sum())

        color_scale = ColorbarBase(p.colorbar_ax, cmap=density_plot.cmap,
                                   norm=density_plot.norm, orientation='horizontal')
        # color_scale.set_label('count per pixel')
        color_scale.set_label('log10(count per pixel)')

        # moving reference frame correction. all panels will have same limits, based on time of last frame
        view_dt = 0.0  # (frame_start - t0).seconds
        x_ctr = x0 + view_dt * u
        y_ctr = y0 + view_dt * v
        view_x = (x_ctr - view_dx / 2.0 - pad, x_ctr + view_dx / 2.0 + pad)
        view_y = (y_ctr - view_dy / 2.0 - pad, y_ctr + view_dy / 2.0 + pad)
        # view_x = (b.x[0] + view_dt * u, b.x[1] + view_dt * u)
        # view_y = (b.y[0] + view_dt * v, b.y[1] + view_dt * v)

        # print 'making timeseries',
        # time_series = figure(figsize=(16, 9))
        # ts_ax = time_series.add_subplot(111)
        # ts_ax.plot_date(mx2num(all_t), total_counts, '-', label='total sources', tz=tz)
        # ts_ax.plot_date(mx2num(all_t), density_maxes, '-', label='max pixel', tz=tz)
        # ts_ax.xaxis.set_major_formatter(time_series_x_fmt)
        # ts_ax.legend()
        # time_filename = 'out/LMA-timeseries_%s_%5.2fkm_%5.1fs.pdf' % (start_time.strftime('%Y%m%d_%H%M%S'), dx/1000.0, time_delta.seconds)
        # time_series.savefig(time_filename)
        # print ' ... done'

        print('making multiples', end=' ')
        p.multiples.flat[0].axis(view_x + view_y)
        filename = 'out/LMA-density_%s_%5.2fkm_%5.1fs.pdf' % (start_time.strftime('%Y%m%d_%H%M%S'), dx / 1000.0, time_delta.seconds)
        f.savefig(filename, dpi=150)
        print(' ... done')
        f.clf()

    return events