Example #1
    return ' '.join(sentence)


# The listing is truncated above; the imports below are assumed from the
# calls that follow (a sketch of the missing preprocess() helper is given
# after this example).
import sys
import random
import numpy as np
from nltk.corpus import stopwords as nltk_stopwords
from nltk.tokenize import wordpunct_tokenize
from sklearn.feature_extraction.text import CountVectorizer
from dbn import DBN
from dbn.layers import Sigmoid

stopwords = nltk_stopwords.words('english')  # assumed stop-word list

if __name__ == '__main__':
    hidden_units = 200
    names = [preprocess(line.strip()) for line in open(sys.argv[1], 'r')]
    random.shuffle(names)

    # Binary bag-of-words encoding of the documents.
    word_counter = CountVectorizer(tokenizer=wordpunct_tokenize,
                                   stop_words=stopwords,
                                   binary=True,
                                   dtype=np.byte)
    data = word_counter.fit_transform(names)
    words = word_counter.get_feature_names()  # get_feature_names_out() in newer scikit-learn
    data = data.toarray()
    print(data.shape)
    _, vocab = data.shape

    # DBN: visible layer sized to the vocabulary, two smaller sigmoid hidden layers.
    n = DBN([
        Sigmoid(data.shape[1]),
        Sigmoid(hidden_units),
        Sigmoid(hidden_units // 2)
    ])
    n.fit(data, None)
    """
	visible = r.run_hidden(np.eye(hidden_units))
	out = open('assoc_words','w')
	for f in range(hidden_units):
		out.write(' '.join( words[i] for i in range(len(words)) if visible[f,i] ) )
		out.write('\n')
	"""
Example #3
# Imports below are assumed from the calls that follow; the original listing
# omits them, along with the import of the DBN implementation used here.
import pickle
import numpy as np
from skimage.io import imread_collection
from skimage.transform import resize
from skimage.color import rgb2gray

# Build the model: 5005 visible nodes = 77 * 65 flattened grayscale pixels.
model = DBN(n_nodes=5005, rbm_epoch=100, max_epoch=2000, alpha=0.001)

# Import the training data.
imgs_train = imread_collection('images/train/*.jpg')
print("Imported", len(imgs_train), "images")
print("The first one is", len(imgs_train[0]), "pixels tall, and",
      len(imgs_train[0][0]), "pixels wide")

# Resize to 77x65, convert to grayscale, and flatten each image row-major.
imgs_train = [resize(x, (77, 65), mode='constant', anti_aliasing=False) for x in imgs_train]
imgs_train = [rgb2gray(x) for x in imgs_train]
imgsarr_train = [x.flatten('C') for x in imgs_train]
print(np.array(imgsarr_train).shape)
'''
X = np.array([[0.2157, 0.1255, 0.4039, 1.0, 0.0941, 0.2550],
                [0.1686, 0.9529, 0.0824, 0.0980, 1.0, 0.3529],
                [0.3529, 0.0824, 0.4275, 1.0, 0.1255, 0.2941],
                [0.1255, 1.0, 0.1216, 0.0471, 1.0, 0.2431]])
'''
# Labels: the first 250 images are the positive class, the next 250 negative.
y = []
for i in range(250):
    y.append(1)
for i in range(250):
    y.append(0)
y = np.array([y])  # shape (1, 500)

model.fit(np.array(imgsarr_train), y)
print(model.predict(np.array([imgsarr_train[0]])))  # sanity check on the first training image

# Persist the trained model for later reuse (see the reload sketch below).
filename = '8aug2020p250n250e100_2000a0-001_0-01_0-1.pkl'
with open(filename, 'wb') as f:
    pickle.dump(model, f)
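
Once saved, the pickled model can be reloaded later and applied to new images without retraining. A minimal sketch, assuming the DBN class used above is importable when unpickling, and using a hypothetical test-image path:

import pickle
import numpy as np
from skimage.io import imread
from skimage.transform import resize
from skimage.color import rgb2gray

# Restore the trained model saved above (the DBN class must be importable here).
with open('8aug2020p250n250e100_2000a0-001_0-01_0-1.pkl', 'rb') as f:
    model = pickle.load(f)

# Preprocess exactly as in training: resize to 77x65, grayscale, flatten to 5005 values.
# The image path below is hypothetical.
img = resize(imread('images/test/example.jpg'), (77, 65),
             mode='constant', anti_aliasing=False)
print(model.predict(np.array([rgb2gray(img).flatten('C')])))
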
Example #4
import sys, re, random
import numpy as np
from dbn import DBN
from dbn.layers import *
import theano.tensor as T
import theano

if __name__ == '__main__':
    # Toy data: each row is a one-hot vector with its class index appended,
    # replicated 100 times and shuffled.
    data = np.hstack((np.eye(8), np.arange(8).reshape((8, 1))))
    data = np.vstack(100 * (data,))
    np.random.shuffle(data)

    # Network: a one-hot softmax layer of width 8 and a 3-unit sigmoid layer.
    net = DBN([
        OneHotSoftmax(8),
        Sigmoid(3)
    ], 8, max_epochs=1000)
    net.fit(data[:, :-1], data[:, -1])
    print(net.predict(np.eye(8, dtype=np.float32)))
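
For reference, the toy dataset built above is just the 8x8 identity matrix with an integer label column appended, tiled 100 times. A quick stand-alone shape check of the feature/label split (plain NumPy only, independent of the dbn library):

import numpy as np

data = np.hstack((np.eye(8), np.arange(8).reshape((8, 1))))  # shape (8, 9)
data = np.vstack(100 * (data,))                              # shape (800, 9)
print(data[:, :-1].shape)  # (800, 8) one-hot inputs
print(data[:, -1].shape)   # (800,)   integer labels 0..7
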
