def convert_into_ellipses(data):
    """Build matplotlib Ellipse artists for every (weights, atoms) pair.

    Each atom is a (center, covariance) tuple; the ellipse geometry comes
    from get_ellipse_from_covariance and the i-th weight becomes the alpha.
    Returns a list of lists of Ellipse artists, parallel to `data`.
    """
    def _make_ellipse(index, center, covariance, alpha):
        # One labelled, weight-shaded ellipse for a single mixture atom.
        width, height, theta = get_ellipse_from_covariance(covariance)
        artist = patches.Ellipse(center, width, height, angle=theta,
                                 label='%d' % index)
        artist.set_alpha(alpha)
        return artist

    return [
        [_make_ellipse(idx, center, cov, weights[idx])
         for idx, (center, cov) in enumerate(atoms)]
        for weights, atoms in data
    ]
def original_clusters():
    """Return red Ellipse artists for the three hand-coded reference clusters.

    Each cluster is (center, covariance matrix, point count); alpha is the
    cluster's share of the fixed 60000-point total.
    """
    known_clusters = [
        [(1.5, 2), ((0.5, 0.4), (0.4, 0.5)), 50000],
        [(2, 0), ((0.3, 0), (0, 0.6)), 30000],
        [(4.5, 1), ((0.9, 0.2), (0.2, 0.3)), 20000],
    ]
    ellipses = []
    for center, covariance, count in known_clusters:
        # Convert the covariance matrix into ellipse geometry.
        width, height, theta = get_ellipse_from_covariance(covariance)
        artist = patches.Ellipse(center, width, height, angle=theta,
                                 color='r')
        # Shade by the cluster's fraction of all 60000 points.
        artist.set_alpha(1. * count / 60000)
        ellipses.append(artist)
    return ellipses
def original_clusters():
    # NOTE(review): this is an exact duplicate of an original_clusters()
    # defined earlier in the file; Python keeps only this later definition.
    """Return red Ellipse artists for three hard-coded reference clusters."""
    # (center, covariance matrix, point count) for each reference cluster.
    clusters = [[(1.5, 2), ((0.5, 0.4), (0.4, 0.5)), 50000],
                [(2, 0), ((0.3, 0), (0, 0.6)), 30000],
                [(4.5, 1), ((0.9, 0.2), (0.2, 0.3)), 20000]]
    artists = []
    for center, cov, weight in clusters:
        # Convert the covariance matrix into ellipse width/height/rotation.
        w, h, angle = get_ellipse_from_covariance(cov)
        e = patches.Ellipse(center, w, h, angle=angle, color='r')
        # Alpha = this cluster's share of the 60000 total points.
        e.set_alpha(1. * weight / 60000)
        artists.append(e)
    return artists
# return '%s, %s\n%.2f %d' % (x[3]['controller_id'], x[3]['city'], x[2], x[1]) return '\tcid = {:10s}\n\tcity = {}\n\tratio = {:.2f}\n\tin cluster points = {:d}'.format(x[3]['controller_id'], x[3]['city'], x[2], x[1]) if __name__ == '__main__': args = parse_args() all_groups = extract_all_groups(args.log_file) # with open('formateed_output.txt', 'w') as outfile: # json.dump(all_groups, outfile) centers = [tuple(x['data']) for x in json.load(open(args.centers))] sigmas = load_cluster_parameters(args.covariances) with open(args.weights) as infile: weights = json.load(infile) artists = [] orig_artists = [] for i in range(len(centers)): w, h, angle = get_ellipse_from_covariance(sigmas[i]) e = patches.Ellipse(centers[i], w, h, angle=angle) e_copy = patches.Ellipse(centers[i], w, h, angle=angle) e.set_alpha(np.power(weights[i], .4)) e_copy.set_alpha(weights[i]) artists.append(e) orig_artists.append(e_copy) X, Y, Z = shape_data(args.scored_grid) # fig = plt.figure() # ax1 = fig.add_subplot(131) # ax2 = fig.add_subplot(133) fig, (ax1, ax2) = plt.subplots(1, 2, sharey=True) fig.subplots_adjust(wspace=0) plt.setp([a.get_yticklabels() for a in fig.axes[1:]], visible=False)
def plot_score_contours(args): weights = [] centers = [] sigmas = [] if args.centers and args.covariances and args.weights: # normalize weights to sum to 1. weights = json.load(args.weights) weights = [w / sum(weights) for w in weights] weights = [0.4 * w / max(weights) for w in weights] centers = load_cluster_parameters(args.centers) sigmas = load_cluster_parameters(args.covariances) fig = plt.figure(0) ax = fig.add_subplot(111) for i in range(len(centers)): w, h, angle = get_ellipse_from_covariance(sigmas[i]) e = patches.Ellipse(centers[i], w, h, angle=angle) e.set_alpha(weights[i]) ax.add_artist(e) print i, weights[i], centers[i], sigmas[i] set_ax_limits(ax, args) x, y = zip(*centers) plt.scatter(x, y, s=weights) X, Y, Z = load_json_dump(args.scored_grid) if args.score_cap: Z = [min(z, args.score_cap) for z in Z] if args.score_lower_limit: Z = [max(z, args.score_lower_limit) for z in Z] size = int(math.sqrt(len(Z))) X = np.reshape(X, (size, size)) Y = np.reshape(Y, (size, size)) Z = np.reshape(Z, (size, size)) def format_args(i): kwargs = {} kwargs['mux'] = centers[i][0] kwargs['muy'] = centers[i][1] kwargs['sigmax'] = math.sqrt(sigmas[i][0][0]) kwargs['sigmay'] = math.sqrt(sigmas[i][1][1]) kwargs['sigmaxy'] = sigmas[i][0][1] return kwargs if len(weights): Zgaussians = weights[0] * mlab.bivariate_normal(X, Y, **format_args(0)) for i in range(1, len(centers)): Zgaussians += weights[i] * mlab.bivariate_normal( X, Y, **format_args(i)) if args.plot == 'components': CS = plt.contour(X, Y, Zgaussians, linewidth=10000, inline=1) elif args.plot == 'density': CS = plt.contour(X, Y, Z, linewidth=10000, inline=1) elif args.plot == 'difference': CS = plt.contour(X, Y, Z - Zgaussians, linewidth=10000, inline=1) if args.plot != 'noop': plt.clabel(CS, inline=1) set_plot_limits(plt, args) if args.csv and args.hist2d: args.data = pd.read_csv(args.csv) _plot_hist2d(args.data, args) if args.savefig: if args.savefig == SAVEFIG_INFER_VALUE: name = 
os.path.basename(args.scored_grid).rsplit('.')[0] filename = 'target/plots/{}.png'.format(name) else: filename = args.savefig print 'saving figure to - ', filename plt.savefig(filename, dpi=320) else: plt.show()
def plot_score_contours(args):
    # NOTE(review): exact duplicate of a plot_score_contours() defined
    # earlier in this file; Python keeps only this later definition.
    """Contour-plot a scored grid, optionally overlaying GMM components,
    then show the figure or save it to disk (driven by args)."""
    weights = []
    centers = []
    sigmas = []
    if args.centers and args.covariances and args.weights:
        # normalize weights to sum to 1.
        # NOTE(review): sum()/max() are recomputed per element (O(n^2)),
        # and w / sum(weights) truncates to 0 for int weights on Python 2.
        weights = json.load(args.weights)
        weights = [w / sum(weights) for w in weights]
        # Rescale so the heaviest component gets alpha 0.4.
        weights = [0.4 * w / max(weights) for w in weights]
        centers = load_cluster_parameters(args.centers)
        sigmas = load_cluster_parameters(args.covariances)
        fig = plt.figure(0)
        ax = fig.add_subplot(111)
        for i in range(len(centers)):
            # One alpha-shaded ellipse per mixture component.
            w, h, angle = get_ellipse_from_covariance(sigmas[i])
            e = patches.Ellipse(centers[i], w, h, angle=angle)
            e.set_alpha(weights[i])
            ax.add_artist(e)
            print i, weights[i], centers[i], sigmas[i]
        set_ax_limits(ax, args)
        x, y = zip(*centers)
        plt.scatter(x, y, s=weights)
    X, Y, Z = load_json_dump(args.scored_grid)
    # Clamp scores into [score_lower_limit, score_cap] when either is set.
    if args.score_cap:
        Z = [min(z, args.score_cap) for z in Z]
    if args.score_lower_limit:
        Z = [max(z, args.score_lower_limit) for z in Z]
    # The dump is flat; assumes a square grid -- TODO confirm upstream.
    size = int(math.sqrt(len(Z)))
    X = np.reshape(X, (size, size))
    Y = np.reshape(Y, (size, size))
    Z = np.reshape(Z, (size, size))

    def format_args(i):
        # Keyword arguments for mlab.bivariate_normal from component i.
        kwargs = {}
        kwargs['mux'] = centers[i][0]
        kwargs['muy'] = centers[i][1]
        kwargs['sigmax'] = math.sqrt(sigmas[i][0][0])
        kwargs['sigmay'] = math.sqrt(sigmas[i][1][1])
        kwargs['sigmaxy'] = sigmas[i][0][1]
        return kwargs

    if len(weights):
        # Evaluate the weighted mixture density over the whole grid.
        Zgaussians = weights[0] * mlab.bivariate_normal(X, Y, **format_args(0))
        for i in range(1, len(centers)):
            Zgaussians += weights[i] * mlab.bivariate_normal(X, Y, **format_args(i))
    if args.plot == 'components':
        CS = plt.contour(X, Y, Zgaussians, linewidth=10000, inline=1)
    elif args.plot == 'density':
        CS = plt.contour(X, Y, Z, linewidth=10000, inline=1)
    elif args.plot == 'difference':
        CS = plt.contour(X, Y, Z - Zgaussians, linewidth=10000, inline=1)
    # NOTE(review): if args.plot is an unrecognized non-'noop' value,
    # CS is unbound here and clabel raises NameError.
    if args.plot != 'noop':
        plt.clabel(CS, inline=1)
    set_plot_limits(plt, args)
    if args.csv and args.hist2d:
        args.data = pd.read_csv(args.csv)
        _plot_hist2d(args.data, args)
    if args.savefig:
        if args.savefig == SAVEFIG_INFER_VALUE:
            # NOTE(review): rsplit('.')[0] cuts at the FIRST dot, so
            # 'a.b.json' yields 'a', not 'a.b' -- presumably unintended.
            name = os.path.basename(args.scored_grid).rsplit('.')[0]
            filename = 'target/plots/{}.png'.format(name)
        else:
            filename = args.savefig
        print 'saving figure to - ', filename
        plt.savefig(filename, dpi=320)
    else:
        plt.show()