def example_fewer_edges(N=100, dim=2):
    print('\n***mds.example_fewer_edges()***\n')
    print('Here we explore the MDS embedding of a full graph as far-away '
          'edges are removed.')
    title = 'MDS embedding for multiple proportions of edges'
    X = misc.disk(N, dim)
    colors = misc.labels(X)
    D = multigraph.from_coordinates(X, colors=colors)
    X0 = misc.disk(N, dim) * .5
    for prop in [.99, .8, .6, .4, .2]:
        # keep only the given proportion of edges and re-run the embedding
        DD = multigraph.remove_edges(D, proportion=prop)
        mds = MDS(DD, dim=dim, verbose=1, title=title)
        mds.initialize(X0=X0)
        mds.stochastic(verbose=1, max_iters=300, approx=.99, lr=.5)
        mds.adaptive(verbose=1, min_step=1e-6, max_iters=300)
        mds.figure(title=f'proportion = {prop:0.1f}')
    plt.show()
def example_random_graph(N=100, dim=2):
    print('\n***mds.example_random_graph()***\n')
    print('Here we explore the MDS embedding for a random binomial graph with '
          'different edge probabilities.')
    fig, axes = plt.subplots(2, 3)
    #[ax.set_axis_off() for ax in axes.ravel()]
    plt.tight_layout()
    for p, ax in zip([0.01, 0.02, 0.03, 0.05, 0.1, 1.0], axes.ravel()):
        # sample a binomial random graph with edge probability p and embed it
        D = multigraph.binomial(N, p)
        mds = MDS(D, dim=dim, verbose=1)
        mds.initialize()
        mds.stochastic(max_iters=100, approx=.6, lr=.5)
        mds.agd(min_step=1e-6)
        mds.figureX(ax=ax, edges=True)
        ax.set_xlabel(f'ave. neighs. : {int(100*p)}')
        ax.set_title(f'stress = {mds.cost:0.2e}')
        ax.set_yticks([])
        ax.set_xticks([])
    plt.show()
def example_stochastic(N=100, dim=2):
    print('\n***mds.example_stochastic()***\n')
    Y = misc.disk(N, dim)
    colors = misc.labels(Y)
    D = multigraph.from_coordinates(Y, colors=colors)
    title = 'recovering random coordinates from a full dissimilarity matrix ' \
            'using SGD, the same learning rate, and different approx values'
    mds = MDS(D, dim=dim, verbose=1, title=title)
    mds.initialize()
    for approx in [1., .8, .6, .4, .2, .1]:
        # run SGD using only a fraction (approx) of the edges, then reset
        mds.stochastic(verbose=1, lr=10.0, min_step=1e-6, approx=approx,
                       title=f'SGD using {approx} of edges')
        mds.figure(title=f'approx = {approx}, time = {mds.H["time"]:0.2f}')
        mds.forget()
    plt.show()
def example_random_graph_2(N=100):
    print('\n***mds.example_random_graph_2()***\n')
    print('Here we explore the MDS embedding for a random binomial graph with '
          'different edge probabilities and embedding dimensions.')
    probs = [0.04, 0.05, 0.1, 0.2, 0.5, 1.0]
    nums = [4, 5, 10, 20, 50, 100]
    dims = [2, 3, 4, 5, 10, 20]
    error = np.empty((len(dims), len(probs)))
    fig = plt.figure()
    for i in range(len(probs)):
        p = probs[i]
        D = multigraph.binomial(N, p)
        for j in range(len(dims)):
            dim = dims[j]
            mds = MDS(D, dim=dim)
            mds.initialize()
            # stochastic passes with increasing edge fraction and learning rate,
            # followed by accelerated gradient descent to polish the solution
            mds.stochastic(max_iters=100, approx=.3, lr=5)
            mds.stochastic(max_iters=100, approx=.6, lr=10)
            mds.stochastic(max_iters=100, approx=.9, lr=15)
            mds.agd(min_step=1e-8)
            error[j, i] = max(mds.cost, 1e-6)
    for i in range(len(dims)):
        plt.semilogy(error[i], label=f'dim {dims[i]}')
    plt.ylabel('MDS stress')
    plt.xlabel('average neighbors')
    plt.xticks(range(len(nums)), nums)
    plt.legend()
    plt.tight_layout()
    plt.show()
def example_weights(N=100, dim=2):
    print('\n***mds.example_weights()***\n')
    print('Here we explore the MDS embedding of a full graph for different '
          'weights.')
    title = 'MDS embedding for multiple weights'
    X = misc.disk(N, dim)
    colors = misc.labels(X)
    X0 = misc.disk(N, dim)
    D = multigraph.from_coordinates(X, colors=colors)

    # absolute (unscaled) weights
    mds = MDS(D, dim=dim, verbose=1, title=title)
    mds.initialize(X0=X0)
    mds.stochastic(verbose=1, max_iters=50, approx=.6, lr=50)
    mds.adaptive(verbose=1, min_step=1e-6, max_iters=300)
    mds.figure(title='absolute weights')

    # weights scaled as 1/sqrt(Dij)
    multigraph.set_weights(D, scaling=.5)
    mds = MDS(D, dim=dim, verbose=1, title=title)
    mds.initialize(X0=X0)
    mds.stochastic(verbose=1, max_iters=50, approx=.6, lr=50)
    mds.adaptive(verbose=1, min_step=1e-6, max_iters=300)
    mds.figure(title='1/sqrt(Dij) weights')

    # weights scaled as 1/Dij
    multigraph.set_weights(D, scaling=1)
    mds = MDS(D, dim=dim, verbose=1, title=title)
    mds.initialize(X0=X0)
    mds.stochastic(verbose=1, max_iters=50, approx=.6, lr=50)
    mds.adaptive(verbose=1, min_step=1e-6, max_iters=300)
    mds.figure(title='1/Dij weights')

    # relative weights (scaling exponent 2)
    multigraph.set_weights(D, scaling=2)
    mds = MDS(D, dim=dim, verbose=1, title=title)
    mds.initialize(X0=X0)
    mds.stochastic(verbose=1, max_iters=50, approx=.6, lr=50)
    mds.adaptive(verbose=1, min_step=1e-6, max_iters=300)
    mds.figure(title='relative weights')

    plt.show()
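# A minimal driver sketch, not part of the original module: it assumes this
# file can be run as a script and that misc, multigraph, MDS, np, and plt are
# already imported at module level. It simply invokes one of the example
# functions defined above; swap in any of the others as needed.
if __name__ == '__main__':
    example_fewer_edges(N=100, dim=2)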