def draw(self):
    import matplotlib.pyplot as plt

    # Base price/volume chart; the optional overlays below are layered on top.
    plt, fig, ax = self.draw_chlcv(plt)
    if self.window is not None:
        plt, fig, ax = self.draw_move_average_line(plt, fig, ax)
    if self.lines:
        plt, fig, ax = draw_line(plt, fig, ax, self.lines)
    if self.small_peak_info is not None:
        plt, fig, ax = draw_peak(plt, fig, ax, self.small_peak_info, "small")
    if self.large_peak_info is not None:
        plt, fig, ax = draw_peak(plt, fig, ax, self.large_peak_info, "large")
    if self.vertical_lines:
        plt, fig, ax = draw_vertical_line(plt, fig, ax, self.vertical_lines)
    ax.legend(fontsize=12)
    plt.title("{}_{}".format(self.method_name, self.code))
    save_path = self.save(plt)
    plt.close(fig)
    return save_path
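The overlay helpers draw_line, draw_peak, and draw_vertical_line are not defined in this snippet. A minimal hypothetical sketch of the line overlay, assuming each entry of self.lines is a pair of (x, y) endpoints (the real data layout is not shown):

def draw_line(plt, fig, ax, lines):
    # Hypothetical helper: plot each assumed ((x0, y0), (x1, y1)) segment
    # on the existing axes and hand the plotting objects back.
    for (x0, y0), (x1, y1) in lines:
        ax.plot([x0, x1], [y0, y1], linewidth=1)
    return plt, fig, ax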
import matplotlib.pyplot as plt


def plot_progress_k_means(X, history_centroids, idx, K, i):
    """
    Helper function that displays the progress of k-Means as it is running.
    It is intended for use only with 2D data.

    Parameters
    ----------
    X : ndarray, shape (n_samples, n_features)
        Samples, where n_samples is the number of samples and n_features
        is the number of features.
    history_centroids : ndarray, shape (n_max_iters, K, n_features)
        The history of centroid positions over the iterations.
    idx : ndarray, shape (n_samples, 1)
        Centroid assignments.
    K : int
        The number of centroids.
    i : int
        Current iteration count.
    """
    # Plot the samples, colored by their current cluster assignment.
    plot_data_points(X, idx, K)
    # Mark every centroid position seen so far with an 'x'.
    plt.plot(history_centroids[0:i + 1, :, 0],
             history_centroids[0:i + 1, :, 1],
             linestyle='', marker='x', markersize=10, linewidth=3, color='k')
    plt.title('Iteration number {}'.format(i + 1))
    # Connect consecutive positions of each centroid to show its trajectory.
    for centroid_idx in range(history_centroids.shape[1]):
        for iter_idx in range(i):
            draw_line(history_centroids[iter_idx, centroid_idx, :],
                      history_centroids[iter_idx + 1, centroid_idx, :])
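plot_data_points and draw_line are assumed to be module-level helpers. A minimal sketch consistent with the call sites above (the color map and marker size are arbitrary choices):

def plot_data_points(X, idx, K):
    # Scatter the 2D samples, colored by their assigned centroid index.
    plt.scatter(X[:, 0], X[:, 1], c=idx.ravel(), cmap='hsv', s=15)

def draw_line(p1, p2, **kwargs):
    # Draw a straight segment from point p1 to point p2.
    plt.plot([p1[0], p2[0]], [p1[1], p2[1]], color='b', **kwargs)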
def test_draw_line(self):
    # Use a mutable buffer so draw_line can set bits in place
    # (a str, as in the original, cannot be modified).
    screen = bytearray(10)
    draw_line(screen, 16, 8, 15, 2)
    # Dump the framebuffer as fixed-width binary for visual inspection.
    print(' '.join(format(byte, '08b') for byte in screen))
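The draw_line under test is not shown; one plausible reading is a Bresenham rasterizer over a bit-packed framebuffer. A hypothetical sketch under that assumption (the signature, row width, and MSB-first bit order are guesses, and out-of-range pixels are clipped):

def draw_line(screen, x0, y0, x1, y1, width=16):
    # Bresenham's line algorithm; sets one bit per visited pixel.
    height = len(screen) * 8 // width
    dx, dy = abs(x1 - x0), -abs(y1 - y0)
    sx = 1 if x0 < x1 else -1
    sy = 1 if y0 < y1 else -1
    err = dx + dy
    while True:
        if 0 <= x0 < width and 0 <= y0 < height:
            bit = y0 * width + x0
            screen[bit // 8] |= 0x80 >> (bit % 8)  # MSB-first packing
        if x0 == x1 and y0 == y1:
            break
        e2 = 2 * err
        if e2 >= dy:
            err += dy
            x0 += sx
        if e2 <= dx:
            err += dx
            y0 += sy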
plt.ylim(2, 8)
plt.gca().set_aspect('equal', adjustable='box')
plt.show()

# =============== Part 2: Principal Component Analysis ===============
print('Running PCA on example dataset.')

# Before running PCA, it is important to first normalize X
X_norm, mu, sigma = feature_normalize(X)

# Run PCA
U, S, V = pca(X_norm)

# Draw the eigenvectors centered at the mean of the data; these show
# the directions of maximum variation in the dataset.
plt.figure()
draw_line(mu, mu + 1.5 * S[0] * U[:, 0].T)
draw_line(mu, mu + 1.5 * S[1] * U[:, 1].T)
plt.show()

print('Top eigenvector:')
print('U = ', U[:, 0])
print('(you should expect to see -0.70710678 -0.70710678)')

# =================== Part 3: Dimension Reduction ===================
print('Dimension reduction on example dataset.')

# Plot the normalized dataset (returned from pca)
plt.figure()
plt.scatter(X_norm[:, 0], X_norm[:, 1], facecolors='none', edgecolors='b')
plt.xlim(-4, 3)
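feature_normalize and pca come from the surrounding exercise code and are not shown. A sketch consistent with how they are used here, computing PCA via SVD of the covariance matrix (the ddof choice and exact return shapes are assumptions):

import numpy as np

def feature_normalize(X):
    # Standardize each feature to zero mean and unit variance, returning
    # the statistics so other data can be mapped the same way.
    mu = X.mean(axis=0)
    sigma = X.std(axis=0, ddof=1)
    return (X - mu) / sigma, mu, sigma

def pca(X):
    # SVD of the covariance matrix: the columns of U are the principal
    # directions and S holds the corresponding variances.
    m = X.shape[0]
    Sigma = (X.T @ X) / m
    return np.linalg.svd(Sigma)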
from sina.exercise import *
from draw_home_line import draw_home_line
from draw_home_pie1 import draw_pie1
from draw_home_pie2 import draw_pie2
from draw_home_pie3 import draw_pie3
from draw_home_pie4 import draw_pie4
from draw_home_pie5 import draw_pie5
from draw_home_pie6 import draw_pie6
from draw_home_pie7 import draw_pie7
from draw_home_pie8 import draw_pie8
from draw_bar import draw_bar
from draw_line import draw_line
from draw_pie import draw_pie

if __name__ == '__main__':
    # Pie charts for the home page
    draw_pie1('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie1.js')
    draw_pie2('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie2.js')
    draw_pie3('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie3.js')
    draw_pie4('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie4.js')
    draw_pie5('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie5.js')
    draw_pie6('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie6.js')
    draw_pie7('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie7.js')
    draw_pie8('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_pie8.js')
    # Line chart for the home page
    draw_home_line('/home/gu/PycharmProjects/dbtest2/static/drawjs/home_line.js')
    # Happiness bar, line, and pie charts for the second page
    draw_bar('/home/gu/PycharmProjects/dbtest2/static/drawjs/bar.js')
    draw_line('/home/gu/PycharmProjects/dbtest2/static/drawjs/line.js')
    draw_pie('/home/gu/PycharmProjects/dbtest2/static/drawjs/pie.js')
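None of the draw_* modules are shown; each appears to render chart data into a .js file that the dashboard pages load. A hypothetical sketch of one such writer, assuming an ECharts-style option object (the data below is placeholder, not from the project):

import json

def draw_line(path):
    # Placeholder data; the real module presumably queries the database first.
    option = {
        'xAxis': {'type': 'category', 'data': ['Mon', 'Tue', 'Wed']},
        'yAxis': {'type': 'value'},
        'series': [{'type': 'line', 'data': [120, 200, 150]}],
    }
    # Emit a JS assignment the page can include with a <script> tag.
    with open(path, 'w', encoding='utf-8') as f:
        f.write('option = ' + json.dumps(option, ensure_ascii=False) + ';')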