    def run(self, epochs=1, learning_rate=1.5, regularization=0.0, momentum=0.1):
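        """Train the network for `epochs` passes over the paired data.

        A background NN_process.unsortedPairProcessor thread fills the
        x1/x2/y/z buffers while training runs on the current batch; the
        ready/cont/stop flags and the condition lock implement the
        hand-off between the two threads (double buffering).
        """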
        processor = NN_process.unsortedPairProcessor(
            '../data/pairs/sets/enwiki_pairs_20-train.txt',
            '../data/pairs/sets/enwiki_no_pairs_20-train.txt',
            '../data/model/docfreq.npy', '../data/model/minimal',
            WORDS, EMBEDDING_DIM, BATCH_SIZE)
        train_x1 = theano.shared(value=processor.x1, name='train_x1', borrow=False)
        train_x2 = theano.shared(value=processor.x2, name='train_x2', borrow=False)
        train_y = theano.shared(value=processor.y, name='train_y', borrow=False)
        train_z = theano.shared(value=processor.z, name='train_z', borrow=False)

        print 'Initializing train function...'
        train = self.train_function_momentum(train_x1, train_x2, train_y, train_z)

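        # Start the producer thread; it prepares the next buffer while training
        # runs on the current one.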
        t = Thread(target=processor.process)
        t.start()
        for e in xrange(epochs):
            processor.new_epoch()

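            # Block until the producer signals that the first buffer of this
            # epoch is ready.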
            processor.lock.acquire()
            while not processor.ready:
                processor.lock.wait()
            processor.lock.release()

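            # Copy the buffers into the Theano shared variables; borrow=False
            # forces a copy, so the producer can safely refill its arrays.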
            train_x1.set_value(processor.x1, borrow=False)
            train_x2.set_value(processor.x2, borrow=False)
            train_y.set_value(processor.y, borrow=False)
            train_z.set_value(processor.z, borrow=False)

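            # Hand the buffers back: tell the producer to start preparing the
            # next batch.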
            processor.lock.acquire()
            processor.cont = True
            processor.ready = False
            processor.lock.notifyAll()
            processor.lock.release()

            costs = []
            for b in xrange(BATCHES):
                cost = train(lr=learning_rate, reg=regularization, mom=momentum)
                costs.append(cost)

                print 'Training, batch %d, cost %.5f' % (b, cost)
                print numpy.transpose(self.model1.W.get_value())

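                # Wait for the producer's next buffer, swap it in, and release
                # the producer again (same handshake as above).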
                processor.lock.acquire()
                while not processor.ready:
                    processor.lock.wait()
                processor.lock.release()

                train_x1.set_value(processor.x1, borrow=False)
                train_x2.set_value(processor.x2, borrow=False)
                train_y.set_value(processor.y, borrow=False)
                train_z.set_value(processor.z, borrow=False)

                processor.lock.acquire()
                processor.cont = True
                processor.ready = False
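                # After the last batch of the last epoch, tell the producer
                # thread to shut down.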
                if b == BATCHES-1 and e == epochs-1:
                    processor.stop = True
                processor.lock.notifyAll()
                processor.lock.release()

            print 'Training, epoch %d, mean cost %.5f' % (e, numpy.mean(costs))

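        # Save the trained model to disk.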
        self.save_me('run2.npy')