Example #1

# Imports assumed by this excerpt (not shown in the original snippet).
import matplotlib.pyplot as plt

import n2d
from n2d import datasets as data
from keras.utils import print_summary  # available in older standalone Keras releases

# n_clusters = 10
# pencluster = n2d.n2d(x, nclust = n_clusters, ae_args = {"act":"relu"})
#
# pencluster.preTrainEncoder(weight_id="pendigits")
#
# manifoldGMM = n2d.UmapGMM(n_clusters, umapdim=n_clusters)
#
# pencluster.predict(manifoldGMM)
#
# pencluster.visualize(y, names=None, dataset = "pendigits", nclust = n_clusters)
# print(pencluster.assess(y))

x, y = data.load_mnist()

n_clusters = 10
manifoldGMM = n2d.UmapGMM(n_clusters, umapN=10)
mnistcluster = n2d.n2d(x, manifoldGMM, ndim=n_clusters)

print_summary(mnistcluster.autoencoder.Model)

mnistcluster.fit(weights="weights/mnist-1000-ae_weights.h5", patience=None)

preds = mnistcluster.predict()
mnistcluster.visualize(y, None, nclust=n_clusters)
plt.show()

print(mnistcluster.assess(y))

x_test, y_test = data.load_mnist_test()

# assign new variables in same embedding, transform using autoencoder -> put in
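# A hedged sketch of how the truncated note above might continue: cluster the
# held-out test split with the already-fitted pipeline. This assumes the fitted
# model's predict() accepts new data (as in Example #2's API) and that
# scikit-learn is available; if this API version only predicts on the training
# set, the test split would need to be fitted separately.
from sklearn.metrics import adjusted_rand_score, normalized_mutual_info_score

test_preds = mnistcluster.predict(x_test)
print("test NMI:", normalized_mutual_info_score(y_test, test_preds))
print("test ARI:", adjusted_rand_score(y_test, test_preds))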
Example #2

# Imports assumed by this excerpt (not shown in the original snippet).
import os
import random as rn

import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns

import n2d
from n2d import datasets as data

plt.style.use(["seaborn-white", "seaborn-paper"])
sns.set_context("paper", font_scale=1.3)

os.environ["PYTHONHASHSEED"] = "0"
os.environ["TF_CUDNN_USE_AUTOTUNE"] = "0"
rn.seed(0)
np.random.seed(0)


x, y = data.load_mnist()

n_clusters = 10

manifoldGMM = n2d.UmapGMM(n_clusters, umap_neighbors=10)
ae = n2d.AutoEncoder(x.shape[-1], n_clusters)
mnistcluster = n2d.n2d(ae, manifoldGMM)

# fit
mnistcluster.fit(x, weight_id="weights/mnist-1000-ae_weights.h5", patience=None)
preds_0 = mnistcluster.predict(x)

# fit_predict
preds_1 = mnistcluster.fit_predict(
    x, weights="weights/mnist-1000-ae_weights.h5", patience=None
)
mnistcluster.visualize(y, None, n_clusters)
plt.show()

print(mnistcluster.assess(y))
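
# The two runs above should produce (near-)identical partitions; only the
# cluster IDs may be permuted, so compare them with a permutation-invariant
# score. A minimal sketch, assuming scikit-learn is available (it is not
# imported in the original snippet).
from sklearn.metrics import adjusted_rand_score

print("agreement between fit+predict and fit_predict (ARI):",
      adjusted_rand_score(preds_0, preds_1))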
Example #3

# Third-party imports assumed by this excerpt (not shown in the original snippet).
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
from tensorflow.keras.layers import Input

# First party modules
import n2d
from n2d import datasets as data

plt.style.use(["seaborn-white", "seaborn-paper"])
sns.set_context("paper", font_scale=1.3)

# load up data
x, y, y_names = data.load_fashion()

# define number of clusters
n_clusters = 10

# set up manifold learner
umapgmm = n2d.UmapGMM(n_clusters)


# set up parameters for denoising autoencoder
def add_noise(x, noise_factor):
    x_clean = x
    x_noisy = x_clean + noise_factor * np.random.normal(
        loc=0.0, scale=1.0, size=x_clean.shape)
    x_noisy = np.clip(x_noisy, 0.0, 1.0)
    return x_noisy


# define stages of networks
hidden_dims = [500, 500, 2000]
input_dim = x.shape[-1]
inputs = Input((input_dim,))  # Keras Input expects a shape tuple
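
# The excerpt stops before the network itself is built. A minimal sketch of one
# way to assemble a denoising autoencoder from the pieces above, assuming a
# plain symmetric Keras stack; the bottleneck size, optimizer, loss, and
# training settings below are illustrative assumptions, not from the original.
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Model

h = inputs
for units in hidden_dims:                  # encoder: 500 -> 500 -> 2000
    h = Dense(units, activation="relu")(h)
embedding = Dense(n_clusters)(h)           # low-dimensional bottleneck

h = embedding
for units in reversed(hidden_dims):        # mirrored decoder: 2000 -> 500 -> 500
    h = Dense(units, activation="relu")(h)
outputs = Dense(input_dim)(h)

autoencoder = Model(inputs, outputs)
autoencoder.compile(optimizer="adam", loss="mse")

# denoising objective: reconstruct the clean inputs from their noised versions
autoencoder.fit(add_noise(x, 0.5), x, batch_size=256, epochs=10)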
Example #4

# Imports assumed by this excerpt (not shown in the original snippet);
# train_x and test_x are pandas DataFrames defined earlier in the original file.
import numpy as np
import pandas as pd

import n2d

# The excerpt resumes inside a custom autoencoder class, at the end of its
# pretraining method: freshly trained weights are saved, otherwise previously
# saved weights are loaded.
            )

            self.Model.save_weights(
                "weights/" + weightname + "-" + str(pretrain_epochs) + "-ae_weights.h5"
            )
        else:
            self.Model.load_weights(weights)


# proof of concept clustering
n_clusters = 12


model = n2d.n2d(
    train_x,
    manifoldLearner=n2d.UmapGMM(n_clusters, umapdim=12, umapN=20),
    ndim=20,
    ae_args={"act": "relu"},
)
model.fit(weights="sp_500-20", pretrain_epochs=50)
model.predict(np.asarray(test_x.values).T)

resD = {"name": test_x.columns, "cluster": model.preds}
pivot = pd.DataFrame.from_dict(resD)

# make some pretty plots
def plot_cluster(x, piv, n, r=None, c=None):
    # plot every series assigned to cluster n (r and c are unused here)
    cl = list(piv[piv.cluster == n].name.values)
    x[cl].plot(legend=False)
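

# A hedged usage sketch (not in the original): draw each cluster's members in
# its own figure, assuming test_x is the pandas DataFrame whose columns were
# clustered above.
import matplotlib.pyplot as plt

for k in range(n_clusters):
    plot_cluster(test_x, pivot, k)
    plt.title("cluster {}".format(k))
    plt.show()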

Example #5
File: main.py Project: rymc/N2D-OOP

# Imports assumed by this excerpt (not shown in the original snippet).
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns

import n2d as nd

plt.style.use(['seaborn-white', 'seaborn-paper'])
sns.set_context("paper", font_scale=1.3)
matplotlib.use('agg')

import tensorflow as tf
from keras import backend as K

import datasets as data
x, y, y_names = data.load_har()

n_clusters = 6
harcluster = nd.n2d(x, nclust=n_clusters)

harcluster.preTrainEncoder(weights="har-1000-ae_weights.h5")

manifoldGMM = nd.UmapGMM(n_clusters)

harcluster.predict(manifoldGMM)

harcluster.visualize(y, y_names, dataset="har", nclust=n_clusters)
print(harcluster.assess(y))

from sklearn.cluster import SpectralClustering
import umap


class UmapSpectral:
    def __init__(self,
                 nclust,
                 umapdim=2,
                 umapN=10,
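                 # The original file is cut off at this point; the remainder
                 # below is a hedged reconstruction of the usual drop-in
                 # replacement pattern, not a verbatim copy of rymc/N2D-OOP.
                 umapMd=0.0,
                 umapMetric="euclidean",
                 random_state=0):
        self.nclust = nclust
        # embed the autoencoded features into a low-dimensional manifold
        self.manifoldInEmbedding = umap.UMAP(
            n_components=umapdim,
            n_neighbors=umapN,
            min_dist=umapMd,
            metric=umapMetric,
            random_state=random_state,
        )
        # cluster the embedded points with spectral clustering
        self.clusterManifold = SpectralClustering(
            n_clusters=nclust,
            affinity="nearest_neighbors",
            random_state=random_state,
        )
        self.hle = None

    def fit(self, hl):
        self.hle = self.manifoldInEmbedding.fit_transform(hl)

    def predict(self, hl):
        # swap SpectralClustering above to change the clustering mechanism
        return self.clusterManifold.fit_predict(self.hle)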